From bf63b0ae1e3323ed9b765465d8d9d0d1332b8ea9 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 16 Nov 2023 13:24:08 -0500 Subject: [PATCH 01/60] perf: Switch LensVM to wasmtime runtime (#2030) ## Relevant issue(s) Resolves #2029 ## Description Switches LensVM to the wasmtime runtime. Should be a fair bit faster than wazero, and supports all the important build targets. Also doesn't suffer from https://github.com/tetratelabs/wazero/issues/1818 Note on [wasmtime-go](https://github.com/bytecodealliance/wasmtime-go): > This Go library uses CGO to consume the C API of the [Wasmtime project](https://github.com/bytecodealliance/wasmtime) which is written in Rust. Precompiled binaries of Wasmtime are checked into this repository on tagged releases so you won't have to install Wasmtime locally, but it means that this project only works on Linux x86_64, macOS x86_64 , and Windows x86_64 currently. Building on other platforms will need to arrange to build Wasmtime and use CGO_* env vars to compile correctly. This is different to wazero, which as a pure Go project supports pretty much anything Go does without messing about. --- .github/workflows/test-and-upload-coverage.yml | 1 + go.mod | 4 ++-- go.sum | 8 ++++---- lens/registry.go | 4 ++-- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index cc3aa84a7e..6535b2df5e 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -49,6 +49,7 @@ jobs: runs-on: ${{ matrix.os }} env: + CGO_ENABLED: 1 DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} DEFRA_CLIENT_HTTP: ${{ matrix.client-type == 'http' }} DEFRA_CLIENT_CLI: ${{ matrix.client-type == 'cli' }} diff --git a/go.mod b/go.mod index dfe905f39e..b9cc9ea219 100644 --- a/go.mod +++ b/go.mod @@ -21,7 +21,7 @@ require ( github.com/ipfs/go-log v1.0.5 github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 - github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 + github.com/lens-vm/lens/host-go v0.0.0-20231108161805-be145dd591bb github.com/libp2p/go-libp2p v0.32.1 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.1 @@ -57,6 +57,7 @@ require ( github.com/Jorropo/jsync v1.0.1 // indirect github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect + github.com/bytecodealliance/wasmtime-go/v14 v14.0.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect @@ -168,7 +169,6 @@ require ( github.com/spf13/cast v1.5.1 // indirect github.com/stretchr/objx v0.5.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect - github.com/tetratelabs/wazero v1.3.1 // indirect github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect diff --git a/go.sum b/go.sum index cfc064271c..da987e8434 100644 --- a/go.sum +++ b/go.sum @@ -64,6 +64,8 @@ github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBT github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= github.com/bxcodec/faker v2.0.1+incompatible/go.mod h1:BNzfpVdTwnFJ6GtfYTcQu6l6rHShT+veBxNCnjCx5XM= 
+github.com/bytecodealliance/wasmtime-go/v14 v14.0.0 h1:ur7S3P+PAeJmgllhSrKnGQOAmmtUbLQxb/nw2NZiaEM= +github.com/bytecodealliance/wasmtime-go/v14 v14.0.0/go.mod h1:tqOVEUjnXY6aGpSfM9qdVRR6G//Yc513fFYUdzZb/DY= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -360,8 +362,8 @@ github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25 h1:hC67vWtvuDnw8w6u4jLFoj3SOH92/4Lq8SCR++L7njw= -github.com/lens-vm/lens/host-go v0.0.0-20230729032926-5acb4df9bd25/go.mod h1:rDE4oJUIAQoXX9heUg8VOQf5LscRWj0BeE5mbGqOs3E= +github.com/lens-vm/lens/host-go v0.0.0-20231108161805-be145dd591bb h1:e/9Oqk71LAu/qADRkDMgJAvb52CU1Ks27GBsIdaqW1c= +github.com/lens-vm/lens/host-go v0.0.0-20231108161805-be145dd591bb/go.mod h1:RNLpJkOAuqT392hxxz0FQiMiEd3pY8eI5s1AUEPAOeE= github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8= github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg= github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38yPW7c= @@ -600,8 +602,6 @@ github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8 github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= -github.com/tetratelabs/wazero v1.3.1 h1:rnb9FgOEQRLLR8tgoD1mfjNjMhFeWRUk+a4b4j/GpUM= -github.com/tetratelabs/wazero v1.3.1/go.mod h1:wYx2gNRg8/WihJfSDxA1TIL8H+GkfLYm+bIfbblu9VQ= github.com/textileio/go-datastore-extensions v1.0.1 h1:qIJGqJaigQ1wD4TdwS/hf73u0HChhXvvUSJuxBEKS+c= github.com/textileio/go-ds-badger3 v0.1.0 h1:q0kBuBmAcRUR3ClMSYlyw0224XeuzjjGinU53Qz1uXI= github.com/textileio/go-log/v2 v2.1.3-gke-2 h1:YkMA5ua0Cf/X6CkbexInsoJ/HdaHQBlgiv9Yy9hddNM= diff --git a/lens/registry.go b/lens/registry.go index a4074ca7f4..20b125a498 100644 --- a/lens/registry.go +++ b/lens/registry.go @@ -19,7 +19,7 @@ import ( "github.com/lens-vm/lens/host-go/config" "github.com/lens-vm/lens/host-go/config/model" "github.com/lens-vm/lens/host-go/engine/module" - "github.com/lens-vm/lens/host-go/runtimes/wazero" + "github.com/lens-vm/lens/host-go/runtimes/wasmtime" "github.com/sourcenetwork/immutable" "github.com/sourcenetwork/immutable/enumerable" @@ -104,7 +104,7 @@ func NewRegistry(lensPoolSize immutable.Option[int], db TxnSource) client.LensRe db: db, registry: &lensRegistry{ poolSize: size, - runtime: wazero.New(), + runtime: wasmtime.New(), modulesByPath: map[string]module.Module{}, lensPoolsBySchemaVersionID: map[string]*lensPool{}, reversedPoolsBySchemaVersionID: map[string]*lensPool{}, From f65d7fa32fe1b5fc977553ebb3fa43e728f7eaba Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Mon, 20 Nov 2023 14:46:24 -0500 Subject: [PATCH 02/60] ci: Add windows test runner (#2033) ## Relevant issue(s) Resolves #2032 ## Description Adds a windows test run to our test matrix. 
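For illustration, a minimal self-contained sketch of the two cross-platform test patterns this change applies throughout the suites: skipping known resource-leaking tests on Windows via a `runtime.GOOS` guard, and deferring an explicit `Close()` so stores release their files before the test runner deletes temp directories (open files cannot be deleted on Windows). The `store`/`openStore` names below are placeholders standing in for the real fixtures (`newLoadedDatastore`, `newIndexTestFixture`, `newTestNode`); this is a sketch of the pattern, not the patch itself:

```go
package example

import (
	"runtime"
	"testing"

	"github.com/stretchr/testify/require"
)

// store is a placeholder for the datastores/nodes used in the real tests;
// all that matters here is that it exposes a Close() error method.
type store struct{}

func (s *store) Close() error { return nil }

// openStore stands in for fixtures like newLoadedDatastore(ctx, t).
func openStore(t *testing.T) *store { return &store{} }

func TestCrossPlatformPattern(t *testing.T) {
	// Tests known to leak OS resources are skipped on Windows outright,
	// mirroring the runtime.GOOS guard added to configfile_test.go.
	if runtime.GOOS == "windows" {
		t.Skip("leaks file handles on windows, see https://github.com/sourcenetwork/defradb/issues/2057")
	}

	s := openStore(t)
	// Every fixture now closes its store before the test ends; on Windows
	// the Golang test runner cannot delete temp files that are still open.
	defer func() {
		err := s.Close()
		require.NoError(t, err)
	}()

	// ... test body ...
}
```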
--- .../workflows/test-and-upload-coverage.yml | 11 +++ config/configfile_test.go | 5 ++ datastore/badger/v4/datastore_test.go | 68 +++++++++++++++ datastore/memory/memory_test.go | 4 +- db/index_test.go | 51 +++++++++++ db/indexed_docs_test.go | 18 ++++ http/errors.go | 16 +++- http/server.go | 2 +- http/server_test.go | 2 +- logging/logging_test.go | 6 ++ net/client_test.go | 4 + net/dialer_test.go | 27 ++++-- net/node_test.go | 29 ++++++- net/peer_test.go | 87 +++++++++---------- .../integration/backup/simple/export_test.go | 2 +- .../integration/backup/simple/import_test.go | 2 +- 16 files changed, 272 insertions(+), 62 deletions(-) diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index 6535b2df5e..15846256fb 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -28,6 +28,7 @@ jobs: name: Run tests matrix job strategy: + fail-fast: true matrix: os: [ubuntu-latest] client-type: [go, http, cli] @@ -45,9 +46,19 @@ jobs: database-type: badger-memory mutation-type: collection-save detect-changes: false + - os: windows-latest + client-type: go + database-type: badger-memory + mutation-type: collection-save + detect-changes: false runs-on: ${{ matrix.os }} + # We run all runners via the bash shell to provide us with a consistent set of env variables and commands + defaults: + run: + shell: bash + env: CGO_ENABLED: 1 DEFRA_CLIENT_GO: ${{ matrix.client-type == 'go' }} diff --git a/config/configfile_test.go b/config/configfile_test.go index a9ec874f9f..5f7aed26aa 100644 --- a/config/configfile_test.go +++ b/config/configfile_test.go @@ -14,6 +14,7 @@ import ( "bytes" "os" "path/filepath" + "runtime" "testing" "text/template" @@ -61,6 +62,10 @@ func TestWritesConfigFileErroneousPath(t *testing.T) { } func TestReadConfigFileForLogger(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skipf("Test is not supported on windows as it leaks resources, see https://github.com/sourcenetwork/defradb/issues/2057") + } + cfg := DefaultConfig() tmpdir := t.TempDir() cfg.Rootdir = tmpdir diff --git a/datastore/badger/v4/datastore_test.go b/datastore/badger/v4/datastore_test.go index 1f4f0bb5d2..69a24981df 100644 --- a/datastore/badger/v4/datastore_test.go +++ b/datastore/badger/v4/datastore_test.go @@ -60,6 +60,10 @@ func TestNewDatastoreWithOptions(t *testing.T) { s, err := NewDatastore(dir, &opt) require.NoError(t, err) + defer func() { + err := s.Close() + require.NoError(t, err) + }() s.Put(ctx, testKey1, testValue1) s.Put(ctx, testKey2, testValue2) @@ -68,6 +72,10 @@ func TestNewDatastoreWithOptions(t *testing.T) { func TestNewBatch(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() b, err := s.Batch(ctx) require.NoError(t, err) @@ -77,6 +85,10 @@ func TestNewBatch(t *testing.T) { func TestBatchOperations(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() b, err := s.Batch(ctx) require.NoError(t, err) @@ -154,6 +166,10 @@ func TestBatchCommitWithStoreClosed(t *testing.T) { func TestBatchConsecutiveCommit(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() b, err := s.Batch(ctx) require.NoError(t, err) @@ -168,6 +184,10 @@ func TestBatchConsecutiveCommit(t *testing.T) { func TestCollectGarbage(t *testing.T) { ctx 
:= context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() err := s.CollectGarbage(ctx) require.NoError(t, err) @@ -206,6 +226,10 @@ func TestConsecutiveClose(t *testing.T) { func TestGetOperation(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() resp, err := s.Get(ctx, testKey1) require.NoError(t, err) @@ -225,6 +249,10 @@ func TestGetOperationWithStoreClosed(t *testing.T) { func TestGetOperationNotFound(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() _, err := s.Get(ctx, testKey3) require.ErrorIs(t, err, ds.ErrNotFound) @@ -233,6 +261,10 @@ func TestGetOperationNotFound(t *testing.T) { func TestDeleteOperation(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() err := s.Delete(ctx, testKey1) require.NoError(t, err) @@ -244,6 +276,10 @@ func TestDeleteOperation(t *testing.T) { func TestDeleteOperation2(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() err := s.Put(ctx, testKey1, testValue1) require.NoError(t, err) @@ -269,6 +305,10 @@ func TestDeleteOperationWithStoreClosed(t *testing.T) { func TestGetSizeOperation(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() resp, err := s.GetSize(ctx, testKey1) require.NoError(t, err) @@ -278,6 +318,10 @@ func TestGetSizeOperation(t *testing.T) { func TestGetSizeOperationNotFound(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() _, err := s.GetSize(ctx, testKey3) require.ErrorIs(t, err, ds.ErrNotFound) @@ -297,6 +341,10 @@ func TestGetSizeOperationWithStoreClosed(t *testing.T) { func TestHasOperation(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() resp, err := s.Has(ctx, testKey1) require.NoError(t, err) @@ -306,6 +354,10 @@ func TestHasOperation(t *testing.T) { func TestHasOperationNotFound(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() resp, err := s.Has(ctx, testKey3) require.NoError(t, err) @@ -326,6 +378,10 @@ func TestHasOperationWithStoreClosed(t *testing.T) { func TestPutOperation(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() err := s.Put(ctx, testKey3, testValue3) require.NoError(t, err) @@ -349,6 +405,10 @@ func TestPutOperationWithStoreClosed(t *testing.T) { func TestQueryOperation(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() results, err := s.Query(ctx, dsq.Query{ Limit: 1, @@ -379,6 +439,10 @@ func TestQueryOperationWithStoreClosed(t *testing.T) { func TestDiskUsage(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() size, err := s.DiskUsage(ctx) require.NoError(t, err) @@ -399,6 +463,10 @@ func TestDiskUsageWithStoreClosed(t *testing.T) { func 
TestSync(t *testing.T) { ctx := context.Background() s := newLoadedDatastore(ctx, t) + defer func() { + err := s.Close() + require.NoError(t, err) + }() err := s.Sync(ctx, testKey1) require.NoError(t, err) } diff --git a/datastore/memory/memory_test.go b/datastore/memory/memory_test.go index 1dbf7a825e..8cfa26823f 100644 --- a/datastore/memory/memory_test.go +++ b/datastore/memory/memory_test.go @@ -475,7 +475,9 @@ func TestClearOldFlightTransactions(t *testing.T) { s.inFlightTxn.Set(dsTxn{ dsVersion: s.getVersion(), txnVersion: s.getVersion() + 1, - expiresAt: time.Now(), + // Ensure expiresAt is before the value returned from the later call in `clearOldInFlightTxn`, + // in windows in particular it seems that the two `time.Now` calls can return the same value + expiresAt: time.Now().Add(-1 * time.Minute), }) require.Equal(t, 1, s.inFlightTxn.Len()) diff --git a/db/index_test.go b/db/index_test.go index e5682b551c..e85fd9bfb4 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -239,6 +239,7 @@ func (f *indexTestFixture) getCollectionIndexes(colName string) ([]client.IndexD func TestCreateIndex_IfFieldsIsEmpty_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() _, err := f.createCollectionIndex(client.IndexDescription{ Name: "some_index_name", @@ -248,6 +249,7 @@ func TestCreateIndex_IfFieldsIsEmpty_ReturnError(t *testing.T) { func TestCreateIndex_IfIndexDescriptionIDIsNotZero_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() for _, id := range []uint32{1, 20, 999} { desc := client.IndexDescription{ @@ -264,6 +266,7 @@ func TestCreateIndex_IfIndexDescriptionIDIsNotZero_ReturnError(t *testing.T) { func TestCreateIndex_IfValidInput_CreateIndex(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := client.IndexDescription{ Name: "some_index_name", @@ -279,6 +282,7 @@ func TestCreateIndex_IfValidInput_CreateIndex(t *testing.T) { func TestCreateIndex_IfFieldNameIsEmpty_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := client.IndexDescription{ Name: "some_index_name", @@ -292,6 +296,7 @@ func TestCreateIndex_IfFieldNameIsEmpty_ReturnError(t *testing.T) { func TestCreateIndex_IfFieldHasNoDirection_DefaultToAsc(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := client.IndexDescription{ Name: "some_index_name", @@ -304,6 +309,7 @@ func TestCreateIndex_IfFieldHasNoDirection_DefaultToAsc(t *testing.T) { func TestCreateIndex_IfSingleFieldInDescOrder_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := client.IndexDescription{ Fields: []client.IndexedFieldDescription{ @@ -316,6 +322,7 @@ func TestCreateIndex_IfSingleFieldInDescOrder_ReturnError(t *testing.T) { func TestCreateIndex_IfIndexWithNameAlreadyExists_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() name := "some_index_name" desc1 := client.IndexDescription{ @@ -334,6 +341,7 @@ func TestCreateIndex_IfIndexWithNameAlreadyExists_ReturnError(t *testing.T) { func TestCreateIndex_IfGeneratedNameMatchesExisting_AddIncrement(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() name := usersColName + "_" + usersAgeFieldName + "_ASC" desc1 := client.IndexDescription{ @@ -359,6 +367,7 @@ func TestCreateIndex_IfGeneratedNameMatchesExisting_AddIncrement(t *testing.T) { func TestCreateIndex_ShouldSaveToSystemStorage(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() name := "users_age_ASC" desc := client.IndexDescription{ @@ -380,6 +389,7 @@ 
func TestCreateIndex_ShouldSaveToSystemStorage(t *testing.T) { func TestCreateIndex_IfCollectionDoesntExist_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := client.IndexDescription{ Fields: []client.IndexedFieldDescription{{Name: productsPriceFieldName}}, @@ -391,6 +401,7 @@ func TestCreateIndex_IfCollectionDoesntExist_ReturnError(t *testing.T) { func TestCreateIndex_IfPropertyDoesntExist_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() const field = "non_existing_field" desc := client.IndexDescription{ @@ -433,10 +444,13 @@ func TestCreateIndex_WithMultipleCollectionsAndIndexes_AssignIncrementedIDPerCol func TestCreateIndex_IfFailsToCreateTxn_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() testErr := errors.New("test error") mockedRootStore := mocks.NewRootStore(t) + mockedRootStore.On("Close").Return(nil) + mockedRootStore.EXPECT().NewTransaction(mock.Anything, mock.Anything).Return(nil, testErr) f.db.rootstore = mockedRootStore @@ -446,6 +460,7 @@ func TestCreateIndex_IfFailsToCreateTxn_ReturnError(t *testing.T) { func TestCreateIndex_IfProvideInvalidIndexName_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() indexDesc := getUsersIndexDescOnName() indexDesc.Name = "!" @@ -455,6 +470,7 @@ func TestCreateIndex_IfProvideInvalidIndexName_ReturnError(t *testing.T) { func TestCreateIndex_ShouldUpdateCollectionsDescription(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() indOnName, err := f.users.CreateIndex(f.ctx, getUsersIndexDescOnName()) require.NoError(t, err) @@ -500,6 +516,7 @@ func TestCreateIndex_IfAttemptToIndexOnUnsupportedType_ReturnError(t *testing.T) func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() usersIndexDesc := client.IndexDescription{ Name: "users_name_index", @@ -531,6 +548,7 @@ func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { func TestGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() indexKey := core.NewCollectionIndexKey(usersColName, "users_name_index") err := f.txn.Systemstore().Put(f.ctx, indexKey.ToDS(), []byte("invalid")) @@ -542,6 +560,7 @@ func TestGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { func TestGetIndexes_IfInvalidIndexKeyIsStored_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() indexKey := core.NewCollectionIndexKey(usersColName, "users_name_index") key := ds.NewKey(indexKey.ToString() + "/invalid") @@ -561,6 +580,7 @@ func TestGetIndexes_IfInvalidIndexKeyIsStored_ReturnError(t *testing.T) { func TestGetIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() testErr := errors.New("test error") @@ -575,6 +595,7 @@ func TestGetIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { func TestGetIndexes_IfSystemStoreFails_ShouldCloseIterator(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() mockedTxn := f.mockTxn() mockedTxn.MockSystemstore.EXPECT().Query(mock.Anything, mock.Anything).Unset() @@ -587,6 +608,7 @@ func TestGetIndexes_IfSystemStoreFails_ShouldCloseIterator(t *testing.T) { func TestGetIndexes_IfSystemStoreQueryIteratorFails_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() testErr := errors.New("test error") @@ -604,6 +626,7 @@ func TestGetIndexes_IfSystemStoreQueryIteratorFails_ReturnError(t *testing.T) { func 
TestGetIndexes_IfSystemStoreHasInvalidData_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() mockedTxn := f.mockTxn() @@ -619,6 +642,7 @@ func TestGetIndexes_IfSystemStoreHasInvalidData_ReturnError(t *testing.T) { func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() usersIndexDesc := client.IndexDescription{ Name: "users_name_index", @@ -656,6 +680,7 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T) func TestGetCollectionIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() testErr := errors.New("test error") @@ -671,6 +696,7 @@ func TestGetCollectionIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { func TestGetCollectionIndexes_IfSystemStoreFails_ShouldCloseIterator(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() mockedTxn := f.mockTxn() mockedTxn.MockSystemstore = mocks.NewDSReaderWriter(t) @@ -685,6 +711,7 @@ func TestGetCollectionIndexes_IfSystemStoreFails_ShouldCloseIterator(t *testing. func TestGetCollectionIndexes_IfSystemStoreQueryIteratorFails_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() testErr := errors.New("test error") @@ -701,6 +728,7 @@ func TestGetCollectionIndexes_IfSystemStoreQueryIteratorFails_ReturnError(t *tes func TestGetCollectionIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() indexKey := core.NewCollectionIndexKey(usersColName, "users_name_index") err := f.txn.Systemstore().Put(f.ctx, indexKey.ToDS(), []byte("invalid")) @@ -712,6 +740,7 @@ func TestGetCollectionIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { func TestCollectionGetIndexes_ShouldReturnIndexes(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -724,6 +753,7 @@ func TestCollectionGetIndexes_ShouldReturnIndexes(t *testing.T) { func TestCollectionGetIndexes_ShouldCloseQueryIterator(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -784,6 +814,7 @@ func TestCollectionGetIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { for _, testCase := range testCases { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -800,6 +831,7 @@ func TestCollectionGetIndexes_IfSystemStoreFails_ReturnError(t *testing.T) { func TestCollectionGetIndexes_IfFailsToCreateTxn_ShouldNotCache(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -861,6 +893,7 @@ func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *te func TestCollectionGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() f.createUserCollectionIndexOnAge() @@ -877,6 +910,7 @@ func TestCollectionGetIndexes_IfInvalidIndexIsStored_ReturnError(t *testing.T) { func TestCollectionGetIndexes_IfIndexIsCreated_ReturnUpdateIndexes(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -894,6 +928,7 @@ func TestCollectionGetIndexes_IfIndexIsCreated_ReturnUpdateIndexes(t *testing.T) func TestCollectionGetIndexes_IfIndexIsDropped_ReturnUpdateIndexes(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() f.createUserCollectionIndexOnAge() @@ -982,6 +1017,7 @@ func 
TestCollectionGetIndexes_ShouldReturnIndexesInOrderedByName(t *testing.T) { func TestDropIndex_ShouldDeleteIndex(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := f.createUserCollectionIndexOnName() err := f.dropIndex(usersColName, desc.Name) @@ -994,6 +1030,7 @@ func TestDropIndex_ShouldDeleteIndex(t *testing.T) { func TestDropIndex_IfStorageFails_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := f.createUserCollectionIndexOnName() f.db.Close() @@ -1003,6 +1040,7 @@ func TestDropIndex_IfStorageFails_ReturnError(t *testing.T) { func TestDropIndex_IfCollectionDoesntExist_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() err := f.dropIndex(productsColName, "any_name") assert.ErrorIs(t, err, NewErrCanNotReadCollection(usersColName, nil)) @@ -1010,12 +1048,15 @@ func TestDropIndex_IfCollectionDoesntExist_ReturnError(t *testing.T) { func TestDropIndex_IfFailsToCreateTxn_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() testErr := errors.New("test error") mockedRootStore := mocks.NewRootStore(t) + mockedRootStore.On("Close").Return(nil) + mockedRootStore.EXPECT().NewTransaction(mock.Anything, mock.Anything).Return(nil, testErr) f.db.rootstore = mockedRootStore @@ -1025,6 +1066,7 @@ func TestDropIndex_IfFailsToCreateTxn_ReturnError(t *testing.T) { func TestDropIndex_IfFailsToDeleteFromStorage_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -1043,6 +1085,7 @@ func TestDropIndex_IfFailsToDeleteFromStorage_ReturnError(t *testing.T) { func TestDropIndex_ShouldUpdateCollectionsDescription(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() col := f.users.WithTxn(f.txn) _, err := col.CreateIndex(f.ctx, getUsersIndexDescOnName()) require.NoError(t, err) @@ -1064,6 +1107,7 @@ func TestDropIndex_ShouldUpdateCollectionsDescription(t *testing.T) { func TestDropIndex_IfIndexWithNameDoesNotExist_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() const name = "not_existing_index" err := f.users.DropIndex(f.ctx, name) @@ -1074,6 +1118,7 @@ func TestDropIndex_IfSystemStoreFails_ReturnError(t *testing.T) { testErr := errors.New("test error") f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() @@ -1091,6 +1136,7 @@ func TestDropIndex_IfSystemStoreFails_ReturnError(t *testing.T) { func TestDropAllIndexes_ShouldDeleteAllIndexes(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() _, err := f.createCollectionIndexFor(usersColName, client.IndexDescription{ Fields: []client.IndexedFieldDescription{ {Name: usersNameFieldName, Direction: client.Ascending}, @@ -1115,6 +1161,7 @@ func TestDropAllIndexes_ShouldDeleteAllIndexes(t *testing.T) { func TestDropAllIndexes_IfStorageFails_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() f.db.Close() @@ -1165,6 +1212,7 @@ func TestDropAllIndexes_IfSystemStorageFails_ReturnError(t *testing.T) { for _, testCase := range testCases { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() mockedTxn := f.mockTxn() @@ -1180,6 +1228,7 @@ func TestDropAllIndexes_IfSystemStorageFails_ReturnError(t *testing.T) { func TestDropAllIndexes_ShouldCloseQueryIterator(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() mockedTxn := f.mockTxn() @@ -1198,6 +1247,7 @@ 
func TestDropAllIndexes_ShouldCloseQueryIterator(t *testing.T) { func TestNewCollectionIndex_IfDescriptionHasNoFields_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := getUsersIndexDescOnName() desc.Fields = nil _, err := NewCollectionIndex(f.users, desc) @@ -1206,6 +1256,7 @@ func TestNewCollectionIndex_IfDescriptionHasNoFields_ReturnError(t *testing.T) { func TestNewCollectionIndex_IfDescriptionHasNonExistingField_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() desc := getUsersIndexDescOnName() desc.Fields[0].Name = "non_existing_field" _, err := NewCollectionIndex(f.users, desc) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 6503429c96..f1f8d6270f 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -249,6 +249,7 @@ func (f *indexTestFixture) stubSystemStore(systemStoreOn *mocks.DSReaderWriter_E func TestNonUnique_IfDocIsAdded_ShouldBeIndexed(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -263,6 +264,7 @@ func TestNonUnique_IfDocIsAdded_ShouldBeIndexed(t *testing.T) { func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -281,6 +283,7 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() data, err := json.Marshal(struct { @@ -302,6 +305,7 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -317,6 +321,7 @@ func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -334,6 +339,7 @@ func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnAge() doc := f.newUserDoc("John", 21) @@ -379,6 +385,7 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t func TestNonUnique_IfMultipleIndexes_StoreIndexWithIndexID(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() f.createUserCollectionIndexOnAge() @@ -482,6 +489,7 @@ func TestNonUnique_StoringIndexedFieldValueOfDifferentTypes(t *testing.T) { func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() docJSON, err := json.Marshal(struct { @@ -504,6 +512,7 @@ func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { func TestNonUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() doc1 := f.newUserDoc("John", 21) f.saveDocToCollection(doc1, f.users) @@ -578,6 +587,7 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t for _, tc := range cases { f := newIndexTestFixture(t) + defer f.db.Close() doc := 
f.newUserDoc("John", 21) f.saveDocToCollection(doc, f.users) @@ -595,6 +605,7 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() doc := f.newUserDoc("John", 21) f.saveDocToCollection(doc, f.users) @@ -652,6 +663,7 @@ func TestNonUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() cases := []struct { @@ -698,6 +710,7 @@ func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -786,6 +799,7 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { t.Log(tc.Name) f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -810,6 +824,7 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { func TestNonUniqueUpdate_IfFailsToUpdateIndex_ReturnError(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnAge() doc := f.newUserDoc("John", 21) @@ -829,6 +844,7 @@ func TestNonUniqueUpdate_IfFailsToUpdateIndex_ReturnError(t *testing.T) { func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() f.createUserCollectionIndexOnAge() @@ -891,6 +907,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { t.Log(tc.Name) f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() doc := f.newUserDoc("John", 21) @@ -922,6 +939,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { f := newIndexTestFixture(t) + defer f.db.Close() f.createUserCollectionIndexOnName() docJSON, err := json.Marshal(struct { diff --git a/http/errors.go b/http/errors.go index 7e07053df5..dae6a2d863 100644 --- a/http/errors.go +++ b/http/errors.go @@ -12,7 +12,12 @@ package http import ( "encoding/json" - "errors" + + "github.com/sourcenetwork/defradb/errors" +) + +const ( + errFailedToLoadKeys string = "failed to load given keys" ) // Errors returnable from this package. 
@@ -56,3 +61,12 @@ func (e *errorResponse) UnmarshalJSON(data []byte) error { e.Error = parseError(out["error"]) return nil } + +func NewErrFailedToLoadKeys(inner error, publicKeyPath, privateKeyPath string) error { + return errors.Wrap( + errFailedToLoadKeys, + inner, + errors.NewKV("PublicKeyPath", publicKeyPath), + errors.NewKV("PrivateKeyPath", privateKeyPath), + ) +} diff --git a/http/server.go b/http/server.go index 384264a8a6..768542c68d 100644 --- a/http/server.go +++ b/http/server.go @@ -272,7 +272,7 @@ func (s *Server) listenWithTLS(ctx context.Context) error { s.options.TLS.Value().PublicKey, ) if err != nil { - return errors.WithStack(err) + return NewErrFailedToLoadKeys(err, s.options.TLS.Value().PublicKey, s.options.TLS.Value().PrivateKey) } cfg.Certificates = []tls.Certificate{cert} diff --git a/http/server_test.go b/http/server_test.go index 5e970ad317..04095b7c15 100644 --- a/http/server_test.go +++ b/http/server_test.go @@ -105,7 +105,7 @@ func TestNewServerAndRunWithSelfSignedCertAndNoKeyFiles(t *testing.T) { go func() { close(serverRunning) err := s.Listen(ctx) - assert.Contains(t, err.Error(), "no such file or directory") + assert.Contains(t, err.Error(), "failed to load given keys") defer close(serverDone) }() diff --git a/logging/logging_test.go b/logging/logging_test.go index 0c776ffa33..5a19cfb744 100644 --- a/logging/logging_test.go +++ b/logging/logging_test.go @@ -8,6 +8,12 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. +// todo: The logger(s) appear to leak resources and do not close down promptly on windows, +// the log files have open readers when the Golang test runner attempts to delete them. +// See https://github.com/sourcenetwork/defradb/issues/2057 for more info. + +//go:build !windows + package logging import ( diff --git a/net/client_test.go b/net/client_test.go index a390485e25..df07e00c34 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -25,6 +25,7 @@ import ( func TestPushlogWithDialFailure(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`)) require.NoError(t, err) @@ -51,6 +52,7 @@ func TestPushlogWithDialFailure(t *testing.T) { func TestPushlogWithInvalidPeerID(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`)) require.NoError(t, err) @@ -71,8 +73,10 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { ctx := context.Background() _, n1 := newTestNode(ctx, t) + defer n1.Close() n1.Start() _, n2 := newTestNode(ctx, t) + defer n2.Close() n2.Start() err := n1.host.Connect(ctx, n2.PeerInfo()) diff --git a/net/dialer_test.go b/net/dialer_test.go index d092602490..d14ed5823e 100644 --- a/net/dialer_test.go +++ b/net/dialer_test.go @@ -21,20 +21,23 @@ import ( ) func TestDial_WithConnectedPeer_NoError(t *testing.T) { - db := FixtureNewMemoryDBWithBroadcaster(t) + db1 := FixtureNewMemoryDBWithBroadcaster(t) + db2 := FixtureNewMemoryDBWithBroadcaster(t) ctx := context.Background() n1, err := NewNode( ctx, - db, + db1, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) assert.NoError(t, err) + defer n1.Close() n2, err := NewNode( ctx, - db, + db2, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) assert.NoError(t, err) + defer n2.Close() addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) if err != nil { 
t.Fatal(err) @@ -45,20 +48,23 @@ func TestDial_WithConnectedPeer_NoError(t *testing.T) { } func TestDial_WithConnectedPeerAndSecondConnection_NoError(t *testing.T) { - db := FixtureNewMemoryDBWithBroadcaster(t) + db1 := FixtureNewMemoryDBWithBroadcaster(t) + db2 := FixtureNewMemoryDBWithBroadcaster(t) ctx := context.Background() n1, err := NewNode( ctx, - db, + db1, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) assert.NoError(t, err) + defer n1.Close() n2, err := NewNode( ctx, - db, + db2, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) assert.NoError(t, err) + defer n2.Close() addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) if err != nil { t.Fatal(err) @@ -72,20 +78,23 @@ func TestDial_WithConnectedPeerAndSecondConnection_NoError(t *testing.T) { } func TestDial_WithConnectedPeerAndSecondConnectionWithConnectionShutdown_ClosingConnectionError(t *testing.T) { - db := FixtureNewMemoryDBWithBroadcaster(t) + db1 := FixtureNewMemoryDBWithBroadcaster(t) + db2 := FixtureNewMemoryDBWithBroadcaster(t) ctx := context.Background() n1, err := NewNode( ctx, - db, + db1, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) assert.NoError(t, err) + defer n1.Close() n2, err := NewNode( ctx, - db, + db2, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) assert.NoError(t, err) + defer n2.Close() addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) if err != nil { t.Fatal(err) diff --git a/net/node_test.go b/net/node_test.go index 15ccc7d065..fcceeb00a3 100644 --- a/net/node_test.go +++ b/net/node_test.go @@ -49,12 +49,13 @@ func TestNewNode_WithEnableRelay_NoError(t *testing.T) { store := memory.NewDatastore(ctx) db, err := db.NewDB(ctx, store, db.WithUpdateEvents()) require.NoError(t, err) - _, err = NewNode( + n, err := NewNode( context.Background(), db, WithEnableRelay(true), ) require.NoError(t, err) + defer n.Close() } func TestNewNode_WithDBClosed_NoError(t *testing.T) { @@ -83,6 +84,7 @@ func TestNewNode_NoPubSub_NoError(t *testing.T) { WithPubSub(false), ) require.NoError(t, err) + defer n.Close() require.Nil(t, n.ps) } @@ -99,6 +101,7 @@ func TestNewNode_WithPubSub_NoError(t *testing.T) { ) require.NoError(t, err) + defer n.Close() // overly simple check of validity of pubsub, avoiding the process of creating a PubSub require.NotNil(t, n.ps) } @@ -128,6 +131,7 @@ func TestNewNode_BootstrapWithNoPeer_NoError(t *testing.T) { WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) require.NoError(t, err) + defer n1.Close() n1.Bootstrap([]peer.AddrInfo{}) } @@ -143,12 +147,14 @@ func TestNewNode_BootstrapWithOnePeer_NoError(t *testing.T) { WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) require.NoError(t, err) + defer n1.Close() n2, err := NewNode( ctx, db, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) require.NoError(t, err) + defer n2.Close() addrs, err := netutils.ParsePeers([]string{n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String()}) if err != nil { t.Fatal(err) @@ -168,12 +174,14 @@ func TestNewNode_BootstrapWithOneValidPeerAndManyInvalidPeers_NoError(t *testing WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) require.NoError(t, err) + defer n1.Close() n2, err := NewNode( ctx, db, WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) require.NoError(t, err) + defer n2.Close() addrs, err := netutils.ParsePeers([]string{ n1.host.Addrs()[0].String() + "/p2p/" + n1.PeerID().String(), "/ip4/0.0.0.0/tcp/1234/p2p/" + "12D3KooWC8YY6Tx3uAeHsdBmoy7PJPwqXAHE4HkCZ5veankKWci6", @@ -195,6 
+203,7 @@ func TestListenAddrs_WithListenP2PAddrStrings_NoError(t *testing.T) { WithListenP2PAddrStrings("/ip4/0.0.0.0/tcp/0"), ) require.NoError(t, err) + defer n.Close() require.Contains(t, n.ListenAddrs()[0].String(), "/tcp/") } @@ -235,6 +244,7 @@ func TestPeerConnectionEventEmitter_MultiEvent_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(event.EvtPeerConnectednessChanged)) require.NoError(t, err) @@ -253,6 +263,7 @@ func TestSubscribeToPubSubEvents_SubscriptionError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() n.Peer.host = &mockHost{n.Peer.host} @@ -266,6 +277,7 @@ func TestPubSubEventEmitter_MultiEvent_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtPubSub)) require.NoError(t, err) @@ -284,6 +296,7 @@ func TestSubscribeToPushLogEvents_SubscriptionError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() n.Peer.host = &mockHost{n.Peer.host} @@ -297,6 +310,7 @@ func TestPushLogEventEmitter_SingleEvent_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -312,6 +326,7 @@ func TestPushLogEventEmitter_MultiEvent_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -330,6 +345,7 @@ func TestWaitForPeerConnectionEvent_WithSamePeer_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(event.EvtPeerConnectednessChanged)) require.NoError(t, err) @@ -354,6 +370,7 @@ func TestWaitForPeerConnectionEvent_WithDifferentPeer_TimeoutError(t *testing.T) db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(event.EvtPeerConnectednessChanged)) require.NoError(t, err) @@ -372,6 +389,7 @@ func TestWaitForPeerConnectionEvent_WithDifferentPeerAndContextClosed_NoError(t db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(event.EvtPeerConnectednessChanged)) require.NoError(t, err) @@ -392,6 +410,7 @@ func TestWaitForPubSubEvent_WithSamePeer_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtPubSub)) require.NoError(t, err) @@ -416,6 +435,7 @@ func TestWaitForPubSubEvent_WithDifferentPeer_TimeoutError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtPubSub)) require.NoError(t, err) @@ -434,6 +454,7 @@ func TestWaitForPubSubEvent_WithDifferentPeerAndContextClosed_NoError(t *testing db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtPubSub)) require.NoError(t, err) @@ -455,6 +476,7 @@ func TestWaitForPushLogByPeerEvent_WithSamePeer_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -480,6 +502,7 @@ func TestWaitForPushLogByPeerEvent_WithDifferentPeer_TimeoutError(t *testing.T) db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -499,6 +522,7 @@ func TestWaitForPushLogByPeerEvent_WithDifferentPeerAndContextClosed_NoError(t * db, ) require.NoError(t, err) + defer n.Close() emitter, err := 
n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -520,6 +544,7 @@ func TestWaitForPushLogFromPeerEvent_WithSamePeer_NoError(t *testing.T) { db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -545,6 +570,7 @@ func TestWaitForPushLogFromPeerEvent_WithDifferentPeer_TimeoutError(t *testing.T db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) @@ -564,6 +590,7 @@ func TestWaitForPushLogFromPeerEvent_WithDifferentPeerAndContextClosed_NoError(t db, ) require.NoError(t, err) + defer n.Close() emitter, err := n.host.EventBus().Emitter(new(EvtReceivedPushLog)) require.NoError(t, err) diff --git a/net/peer_test.go b/net/peer_test.go index 1ce11e238f..cdbc4581dc 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -197,6 +197,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { func TestStartAndClose_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() err := n.Start() require.NoError(t, err) @@ -323,6 +324,7 @@ func TestStart_WitClosedUpdateChannel_ClosedChannelError(t *testing.T) { func TestRegisterNewDocument_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -346,6 +348,7 @@ func TestRegisterNewDocument_NoError(t *testing.T) { func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -372,6 +375,7 @@ func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { func TestSetReplicator_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -392,6 +396,7 @@ func TestSetReplicator_NoError(t *testing.T) { func TestSetReplicator_WithInvalidAddress_EmptyPeerIDError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -409,6 +414,7 @@ func TestSetReplicator_WithInvalidAddress_EmptyPeerIDError(t *testing.T) { func TestSetReplicator_WithDBClosed_DatastoreClosedError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() db.Close() @@ -425,6 +431,7 @@ func TestSetReplicator_WithDBClosed_DatastoreClosedError(t *testing.T) { func TestSetReplicator_WithUndefinedCollection_KeyNotFoundError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() info, err := peer.AddrInfoFromString("/ip4/0.0.0.0/tcp/0/p2p/QmYyQSo1c1Ym7orWxLYvCrM2EmxFTANf8wXmmE7DWjhx5N") require.NoError(t, err) @@ -439,6 +446,7 @@ func TestSetReplicator_WithUndefinedCollection_KeyNotFoundError(t *testing.T) { func TestSetReplicator_ForAllCollections_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -458,6 +466,7 @@ func TestSetReplicator_ForAllCollections_NoError(t *testing.T) { func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String age: Int @@ -485,6 +494,7 @@ func 
TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test func TestDeleteReplicator_WithDBClosed_DataStoreClosedError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() info := peer.AddrInfo{ ID: n.PeerID(), @@ -503,6 +513,7 @@ func TestDeleteReplicator_WithDBClosed_DataStoreClosedError(t *testing.T) { func TestDeleteReplicator_WithTargetSelf_SelfTargetForReplicatorError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() err := n.Peer.DeleteReplicator(ctx, client.Replicator{ Info: n.PeerInfo(), @@ -514,8 +525,10 @@ func TestDeleteReplicator_WithTargetSelf_SelfTargetForReplicatorError(t *testing func TestDeleteReplicator_WithInvalidCollection_KeyNotFoundError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() _, n2 := newTestNode(ctx, t) + defer n2.Close() err := n.Peer.DeleteReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -527,6 +540,7 @@ func TestDeleteReplicator_WithInvalidCollection_KeyNotFoundError(t *testing.T) { func TestDeleteReplicator_WithCollectionAndPreviouslySetReplicator_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -535,6 +549,7 @@ func TestDeleteReplicator_WithCollectionAndPreviouslySetReplicator_NoError(t *te require.NoError(t, err) _, n2 := newTestNode(ctx, t) + defer n2.Close() err = n.Peer.SetReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -550,8 +565,10 @@ func TestDeleteReplicator_WithCollectionAndPreviouslySetReplicator_NoError(t *te func TestDeleteReplicator_WithNoCollection_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() _, n2 := newTestNode(ctx, t) + defer n2.Close() err := n.Peer.DeleteReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -562,6 +579,7 @@ func TestDeleteReplicator_WithNoCollection_NoError(t *testing.T) { func TestDeleteReplicator_WithNotSetReplicator_KeyNotFoundError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -570,6 +588,7 @@ func TestDeleteReplicator_WithNotSetReplicator_KeyNotFoundError(t *testing.T) { require.NoError(t, err) _, n2 := newTestNode(ctx, t) + defer n2.Close() err = n.Peer.DeleteReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -581,6 +600,7 @@ func TestDeleteReplicator_WithNotSetReplicator_KeyNotFoundError(t *testing.T) { func TestGetAllReplicator_WithReplicator_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -589,6 +609,7 @@ func TestGetAllReplicator_WithReplicator_NoError(t *testing.T) { require.NoError(t, err) _, n2 := newTestNode(ctx, t) + defer n2.Close() err = n.Peer.SetReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -605,6 +626,7 @@ func TestGetAllReplicator_WithReplicator_NoError(t *testing.T) { func TestGetAllReplicator_WithDBClosed_DatastoreClosedError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() db.Close() @@ -615,6 +637,7 @@ func TestGetAllReplicator_WithDBClosed_DatastoreClosedError(t *testing.T) { func TestLoadReplicators_WithDBClosed_DatastoreClosedError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() db.Close() @@ -625,6 +648,7 @@ func 
TestLoadReplicators_WithDBClosed_DatastoreClosedError(t *testing.T) { func TestLoadReplicator_WithReplicator_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -633,6 +657,7 @@ func TestLoadReplicator_WithReplicator_NoError(t *testing.T) { require.NoError(t, err) _, n2 := newTestNode(ctx, t) + defer n2.Close() err = n.Peer.SetReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -646,6 +671,7 @@ func TestLoadReplicator_WithReplicator_NoError(t *testing.T) { func TestLoadReplicator_WithReplicatorAndEmptyReplicatorMap_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -654,6 +680,7 @@ func TestLoadReplicator_WithReplicatorAndEmptyReplicatorMap_NoError(t *testing.T require.NoError(t, err) _, n2 := newTestNode(ctx, t) + defer n2.Close() err = n.Peer.SetReplicator(ctx, client.Replicator{ Info: n2.PeerInfo(), @@ -669,6 +696,7 @@ func TestLoadReplicator_WithReplicatorAndEmptyReplicatorMap_NoError(t *testing.T func TestAddP2PCollections_WithInvalidCollectionID_NotFoundError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() err := n.Peer.AddP2PCollections(ctx, []string{"invalid_collection"}) require.Error(t, err, ds.ErrNotFound) @@ -677,6 +705,7 @@ func TestAddP2PCollections_WithInvalidCollectionID_NotFoundError(t *testing.T) { func TestAddP2PCollections_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -694,6 +723,7 @@ func TestAddP2PCollections_NoError(t *testing.T) { func TestRemoveP2PCollectionsWithInvalidCollectionID(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() err := n.Peer.RemoveP2PCollections(ctx, []string{"invalid_collection"}) require.Error(t, err, ds.ErrNotFound) @@ -702,6 +732,7 @@ func TestRemoveP2PCollectionsWithInvalidCollectionID(t *testing.T) { func TestRemoveP2PCollections(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -719,6 +750,7 @@ func TestRemoveP2PCollections(t *testing.T) { func TestGetAllP2PCollectionsWithNoCollections(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() cols, err := n.Peer.GetAllP2PCollections(ctx) require.NoError(t, err) @@ -728,6 +760,7 @@ func TestGetAllP2PCollectionsWithNoCollections(t *testing.T) { func TestGetAllP2PCollections(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -749,6 +782,7 @@ func TestGetAllP2PCollections(t *testing.T) { func TestHandleDocCreateLog_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -790,6 +824,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { func TestHandleDocCreateLog_WithInvalidDockey_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() err := n.handleDocCreateLog(events.Update{ DocKey: "some-invalid-key", @@ -800,6 +835,7 @@ func TestHandleDocCreateLog_WithInvalidDockey_NoError(t *testing.T) { func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + 
defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -829,6 +865,7 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { func TestHandleDocUpdateLog_NoError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -870,6 +907,7 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { func TestHandleDoUpdateLog_WithInvalidDockey_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() err := n.handleDocUpdateLog(events.Update{ DocKey: "some-invalid-key", @@ -880,6 +918,7 @@ func TestHandleDoUpdateLog_WithInvalidDockey_NoError(t *testing.T) { func TestHandleDocUpdateLog_WithExistingDockeyTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -923,6 +962,7 @@ func TestHandleDocUpdateLog_WithExistingDockeyTopic_TopicExistsError(t *testing. func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) + defer n.Close() _, err := db.AddSchema(ctx, `type User { name: String @@ -963,55 +1003,10 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. require.ErrorContains(t, err, "topic already exists") } -func TestPushLogToReplicator_WithReplicator_FailedPushingLogError(t *testing.T) { - ctx := context.Background() - db, n := newTestNode(ctx, t) - - _, err := db.AddSchema(ctx, `type User { - name: String - age: Int - }`) - require.NoError(t, err) - - _, n2 := newTestNode(ctx, t) - - err = n.Peer.SetReplicator(ctx, client.Replicator{ - Info: n2.PeerInfo(), - }) - require.NoError(t, err) - - col, err := db.GetCollectionByName(ctx, "User") - require.NoError(t, err) - - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) - require.NoError(t, err) - - err = col.Create(ctx, doc) - require.NoError(t, err) - - docCid, err := createCID(doc) - require.NoError(t, err) - - delta := &crdt.CompositeDAGDelta{ - SchemaVersionID: col.Schema().VersionID, - Priority: 1, - DocKey: doc.Key().Bytes(), - } - - node, err := makeNode(delta, []cid.Cid{docCid}) - require.NoError(t, err) - - n.pushLogToReplicators(ctx, events.Update{ - DocKey: doc.Key().String(), - Cid: docCid, - SchemaRoot: col.SchemaRoot(), - Block: node, - }) -} - func TestSession_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) + defer n.Close() ng := n.Session(ctx) require.Implements(t, (*ipld.NodeGetter)(nil), ng) } diff --git a/tests/integration/backup/simple/export_test.go b/tests/integration/backup/simple/export_test.go index 08c05b044a..7ee2e65bd5 100644 --- a/tests/integration/backup/simple/export_test.go +++ b/tests/integration/backup/simple/export_test.go @@ -60,7 +60,7 @@ func TestBackupExport_WithInvalidFilePath_ReturnError(t *testing.T) { Config: client.BackupConfig{ Filepath: t.TempDir() + "/some/test.json", }, - ExpectedError: "no such file or directory", + ExpectedError: "failed to create file", }, }, } diff --git a/tests/integration/backup/simple/import_test.go b/tests/integration/backup/simple/import_test.go index d7f6428725..a53760fa3e 100644 --- a/tests/integration/backup/simple/import_test.go +++ b/tests/integration/backup/simple/import_test.go @@ -48,7 +48,7 @@ func TestBackupImport_WithInvalidFilePath_ReturnError(t *testing.T) { Actions: []any{ testUtils.BackupImport{ Filepath: 
t.TempDir() + "/some/test.json", - ExpectedError: "no such file or directory", + ExpectedError: "failed to open file", }, }, } From 5e16b872994f22b3ad50af3241c551417940b5bf Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 22 Nov 2023 03:32:51 -0800 Subject: [PATCH 03/60] bot: Update dependencies (bulk dependabot PRs) 20-11-2023 (#2066) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2063 bot: Bump @typescript-eslint/eslint-plugin from 6.10.0 to 6.11.0 in /playground #2062 bot: Bump @vitejs/plugin-react-swc from 3.4.1 to 3.5.0 in /playground #2060 bot: Bump swagger-ui-react from 5.9.4 to 5.10.0 in /playground #2059 bot: Bump go.opentelemetry.io/otel/sdk/metric from 1.19.0 to 1.21.0 ⚠️ The following PRs had merge conflicts which were resolved manually: #2061 bot: Bump eslint from 8.53.0 to 8.54.0 in /playground #2046 bot: Bump go.opentelemetry.io/otel/metric from 1.19.0 to 1.20.0 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 12 +-- go.sum | 26 +++--- playground/package-lock.json | 174 ++++++++++------------------------- playground/package.json | 8 +- 4 files changed, 73 insertions(+), 147 deletions(-) diff --git a/go.mod b/go.mod index b9cc9ea219..759d70c2a1 100644 --- a/go.mod +++ b/go.mod @@ -43,8 +43,8 @@ require ( github.com/ugorji/go/codec v1.2.11 github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.0.0 - go.opentelemetry.io/otel/metric v1.19.0 - go.opentelemetry.io/otel/sdk/metric v1.19.0 + go.opentelemetry.io/otel/metric v1.21.0 + go.opentelemetry.io/otel/sdk/metric v1.21.0 go.uber.org/zap v1.26.0 golang.org/x/crypto v0.15.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa @@ -73,7 +73,7 @@ require ( github.com/flynn/noise v1.0.0 // indirect github.com/francoispqt/gojay v1.2.13 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect - github.com/go-logr/logr v1.2.4 // indirect + github.com/go-logr/logr v1.3.0 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.19.6 // indirect github.com/go-openapi/swag v0.22.4 // indirect @@ -173,9 +173,9 @@ require ( github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/otel v1.19.0 // indirect - go.opentelemetry.io/otel/sdk v1.19.0 // indirect - go.opentelemetry.io/otel/trace v1.19.0 // indirect + go.opentelemetry.io/otel v1.21.0 // indirect + go.opentelemetry.io/otel/sdk v1.21.0 // indirect + go.opentelemetry.io/otel/trace v1.21.0 // indirect go.uber.org/dig v1.17.1 // indirect go.uber.org/fx v1.20.1 // indirect go.uber.org/mock v0.3.0 // indirect diff --git a/go.sum b/go.sum index da987e8434..dff217c361 100644 --- a/go.sum +++ b/go.sum @@ -153,8 +153,8 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.2.4 h1:g01GSCwiDw2xSZfjJ2/T9M+S6pFdcNtFYsp+Y43HYDQ= 
-github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= +github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= @@ -226,7 +226,7 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF8= @@ -641,16 +641,16 @@ go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/otel v1.19.0 h1:MuS/TNf4/j4IXsZuJegVzI1cwut7Qc00344rgH7p8bs= -go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY= -go.opentelemetry.io/otel/metric v1.19.0 h1:aTzpGtV0ar9wlV4Sna9sdJyII5jTVJEvKETPiOKwvpE= -go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8= -go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= -go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A= -go.opentelemetry.io/otel/sdk/metric v1.19.0 h1:EJoTO5qysMsYCa+w4UghwFV/ptQgqSL/8Ni+hx+8i1k= -go.opentelemetry.io/otel/sdk/metric v1.19.0/go.mod h1:XjG0jQyFJrv2PbMvwND7LwCEhsJzCzV5210euduKcKY= -go.opentelemetry.io/otel/trace v1.19.0 h1:DFVQmlVbfVeOuBRrwdtaehRrWiL1JoVs9CPIQ1Dzxpg= -go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo= +go.opentelemetry.io/otel v1.21.0 h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc= +go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo= +go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= +go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= +go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8= +go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6nwBnZIxl/E= +go.opentelemetry.io/otel/sdk/metric v1.21.0 h1:smhI5oD714d6jHE6Tie36fPx4WDFIg+Y6RfAY4ICcR0= +go.opentelemetry.io/otel/sdk/metric v1.21.0/go.mod h1:FJ8RAsoPGv/wYMgBdUJXOm+6pzFY3YdljnXtv1SBE8Q= +go.opentelemetry.io/otel/trace v1.21.0 h1:WD9i5gzvoUPuXIXH24ZNBudiarZDKuekPqi/E8fpfLc= +go.opentelemetry.io/otel/trace v1.21.0/go.mod h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= 
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= diff --git a/playground/package-lock.json b/playground/package-lock.json index f3252de4aa..ab1c686bf6 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,16 +12,16 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.9.4" + "swagger-ui-react": "^5.10.0" }, "devDependencies": { "@types/react": "^18.2.37", "@types/react-dom": "^18.2.15", "@types/swagger-ui-react": "^4.18.1", - "@typescript-eslint/eslint-plugin": "^6.10.0", + "@typescript-eslint/eslint-plugin": "^6.11.0", "@typescript-eslint/parser": "^6.11.0", - "@vitejs/plugin-react-swc": "^3.4.1", - "eslint": "^8.53.0", + "@vitejs/plugin-react-swc": "^3.5.0", + "eslint": "^8.54.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.4", "typescript": "^5.2.2", @@ -511,9 +511,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.53.0.tgz", - "integrity": "sha512-Kn7K8dx/5U6+cT1yEhpX1w4PCSg0M+XyRILPgvwcEBjerFWCwQj5sbr3/VmxqV0JGHCBCzyd6LxypEuehypY1w==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.54.0.tgz", + "integrity": "sha512-ut5V+D+fOoWPgGGNj83GGjnntO39xDy6DWxO0wb7Jp3DcMX0TfIqdzHF85VTQkerdyGmuuMD9AKAo5KiNlf/AQ==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -2138,16 +2138,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.10.0.tgz", - "integrity": "sha512-uoLj4g2OTL8rfUQVx2AFO1hp/zja1wABJq77P6IclQs6I/m9GLrm7jCdgzZkvWdDCQf1uEvoa8s8CupsgWQgVg==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.11.0.tgz", + "integrity": "sha512-uXnpZDc4VRjY4iuypDBKzW1rz9T5YBBK0snMn8MaTSNd2kMlj50LnLBABELjJiOL5YHk7ZD8hbSpI9ubzqYI0w==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.10.0", - "@typescript-eslint/type-utils": "6.10.0", - "@typescript-eslint/utils": "6.10.0", - "@typescript-eslint/visitor-keys": "6.10.0", + "@typescript-eslint/scope-manager": "6.11.0", + "@typescript-eslint/type-utils": "6.11.0", + "@typescript-eslint/utils": "6.11.0", + "@typescript-eslint/visitor-keys": "6.11.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2200,7 +2200,7 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "node_modules/@typescript-eslint/scope-manager": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.11.0.tgz", "integrity": "sha512-0A8KoVvIURG4uhxAdjSaxy8RdRE//HztaZdG8KiHLP8WOXSk0vlF7Pvogv+vlJA5Rnjj/wDcFENvDaHb+gKd1A==", @@ -2217,88 +2217,14 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.11.0.tgz", - "integrity": "sha512-ZbEzuD4DwEJxwPqhv3QULlRj8KYTAnNsXxmfuUXFCxZmO6CF2gM/y+ugBSAQhrqaJL3M+oe4owdWunaHM6beqA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - 
"funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.11.0.tgz", - "integrity": "sha512-Aezzv1o2tWJwvZhedzvD5Yv7+Lpu1by/U1LZ5gLc4tCx8jUmuSCMioPFRjliN/6SJIvY6HpTtJIWubKuYYYesQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.11.0", - "@typescript-eslint/visitor-keys": "6.11.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.11.0.tgz", - "integrity": "sha512-+SUN/W7WjBr05uRxPggJPSzyB8zUpaYo2hByKasWbqr3PM8AXfZt8UHdNpBS1v9SA62qnSSMF3380SwDqqprgQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.11.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.10.0.tgz", - "integrity": "sha512-TN/plV7dzqqC2iPNf1KrxozDgZs53Gfgg5ZHyw8erd6jd5Ta/JIEcdCheXFt9b1NYb93a1wmIIVW/2gLkombDg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.10.0", - "@typescript-eslint/visitor-keys": "6.10.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.10.0.tgz", - "integrity": "sha512-wYpPs3hgTFblMYwbYWPT3eZtaDOjbLyIYuqpwuLBBqhLiuvJ+9sEp2gNRJEtR5N/c9G1uTtQQL5AhV0fEPJYcg==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.11.0.tgz", + "integrity": "sha512-nA4IOXwZtqBjIoYrJcYxLRO+F9ri+leVGoJcMW1uqr4r1Hq7vW5cyWrA43lFbpRvQ9XgNrnfLpIkO3i1emDBIA==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.10.0", - "@typescript-eslint/utils": "6.10.0", + "@typescript-eslint/typescript-estree": "6.11.0", + "@typescript-eslint/utils": "6.11.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2319,9 +2245,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.10.0.tgz", - "integrity": "sha512-36Fq1PWh9dusgo3vH7qmQAj5/AZqARky1Wi6WpINxB6SkQdY5vQoT2/7rW7uBIsPDcvvGCLi4r10p0OJ7ITAeg==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.11.0.tgz", + "integrity": "sha512-ZbEzuD4DwEJxwPqhv3QULlRj8KYTAnNsXxmfuUXFCxZmO6CF2gM/y+ugBSAQhrqaJL3M+oe4owdWunaHM6beqA==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2332,13 +2258,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - 
"version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.10.0.tgz", - "integrity": "sha512-ek0Eyuy6P15LJVeghbWhSrBCj/vJpPXXR+EpaRZqou7achUWL8IdYnMSC5WHAeTWswYQuP2hAZgij/bC9fanBg==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.11.0.tgz", + "integrity": "sha512-Aezzv1o2tWJwvZhedzvD5Yv7+Lpu1by/U1LZ5gLc4tCx8jUmuSCMioPFRjliN/6SJIvY6HpTtJIWubKuYYYesQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.10.0", - "@typescript-eslint/visitor-keys": "6.10.0", + "@typescript-eslint/types": "6.11.0", + "@typescript-eslint/visitor-keys": "6.11.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2359,17 +2285,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.10.0.tgz", - "integrity": "sha512-v+pJ1/RcVyRc0o4wAGux9x42RHmAjIGzPRo538Z8M1tVx6HOnoQBCX/NoadHQlZeC+QO2yr4nNSFWOoraZCAyg==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.11.0.tgz", + "integrity": "sha512-p23ibf68fxoZy605dc0dQAEoUsoiNoP3MD9WQGiHLDuTSOuqoTsa4oAy+h3KDkTcxbbfOtUjb9h3Ta0gT4ug2g==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.10.0", - "@typescript-eslint/types": "6.10.0", - "@typescript-eslint/typescript-estree": "6.10.0", + "@typescript-eslint/scope-manager": "6.11.0", + "@typescript-eslint/types": "6.11.0", + "@typescript-eslint/typescript-estree": "6.11.0", "semver": "^7.5.4" }, "engines": { @@ -2384,12 +2310,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.10.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.10.0.tgz", - "integrity": "sha512-xMGluxQIEtOM7bqFCo+rCMh5fqI+ZxV5RUUOa29iVPz1OgCZrtc7rFnz5cLUazlkPKYqX+75iuDq7m0HQ48nCg==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.11.0.tgz", + "integrity": "sha512-+SUN/W7WjBr05uRxPggJPSzyB8zUpaYo2hByKasWbqr3PM8AXfZt8UHdNpBS1v9SA62qnSSMF3380SwDqqprgQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.10.0", + "@typescript-eslint/types": "6.11.0", "eslint-visitor-keys": "^3.4.1" }, "engines": { @@ -2407,15 +2333,15 @@ "dev": true }, "node_modules/@vitejs/plugin-react-swc": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.4.1.tgz", - "integrity": "sha512-7YQOQcVV5x1luD8nkbCDdyYygFvn1hjqJk68UvNAzY2QG4o4N5EwAhLLFNOcd1HrdMwDl0VElP8VutoWf9IvJg==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react-swc/-/plugin-react-swc-3.5.0.tgz", + "integrity": "sha512-1PrOvAaDpqlCV+Up8RkAh9qaiUjoDUcjtttyhXDKw53XA6Ve16SOp6cCOpRs8Dj8DqUQs6eTW5YkLcLJjrXAig==", "dev": true, "dependencies": { - "@swc/core": "^1.3.95" + "@swc/core": "^1.3.96" }, "peerDependencies": { - "vite": "^4" + "vite": "^4 || ^5" } }, "node_modules/@yarnpkg/lockfile": { @@ -3030,15 +2956,15 @@ } }, "node_modules/eslint": { - "version": "8.53.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.53.0.tgz", - "integrity": "sha512-N4VuiPjXDUa4xVeV/GC/RV3hQW9Nw+Y463lkWaKKXKYMvmRiRDAtfpuPFLN+E1/6ZhyR8J2ig+eVREnYgUsiag==", + "version": "8.54.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.54.0.tgz", + "integrity": 
"sha512-NY0DfAkM8BIZDVl6PgSa1ttZbx3xHgJzSNJKYcQglem6CppHyMhRIQkBVSSMaSRnLhig3jsDbEzOjwCVt4AmmA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.3", - "@eslint/js": "8.53.0", + "@eslint/js": "8.54.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -5491,9 +5417,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.9.4", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.9.4.tgz", - "integrity": "sha512-VEY+QuNtRkidR/Os81zq22TpbXkfFDJ2pB6UF+J5sgrf2zksPr2oQGADpWn6RkYXXudZcalXUjcUqgx8WgIfaA==", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.10.0.tgz", + "integrity": "sha512-2iWvBRtamhTmr6W0Kj0N1DYy5iuYZcQ2HIv3lEscyoWfn+Tomy5aLZ3Z4LrB5QpNsMebmix9AoDgGFZaqcYPWA==", "dependencies": { "@babel/runtime-corejs3": "^7.23.2", "@braintree/sanitize-url": "=6.0.4", diff --git a/playground/package.json b/playground/package.json index 7e9aacc0e6..ada925ab2b 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,16 +14,16 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.9.4" + "swagger-ui-react": "^5.10.0" }, "devDependencies": { "@types/react": "^18.2.37", "@types/react-dom": "^18.2.15", "@types/swagger-ui-react": "^4.18.1", - "@typescript-eslint/eslint-plugin": "^6.10.0", + "@typescript-eslint/eslint-plugin": "^6.11.0", "@typescript-eslint/parser": "^6.11.0", - "@vitejs/plugin-react-swc": "^3.4.1", - "eslint": "^8.53.0", + "@vitejs/plugin-react-swc": "^3.5.0", + "eslint": "^8.54.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.4", "typescript": "^5.2.2", From 57ae8f553a4507418233b37cb92288c5dbc318d0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 22 Nov 2023 21:35:56 -0800 Subject: [PATCH 04/60] bot: Bump @typescript-eslint/eslint-plugin from 6.11.0 to 6.12.0 in /playground (#2068) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) from 6.11.0 to 6.12.0.
Release notes

Sourced from @​typescript-eslint/eslint-plugin's releases.

v6.12.0

6.12.0 (2023-11-20)

Bug Fixes

  • eslint-plugin: [class-methods-use-this] detect a problematic case for private/protected members if ignoreClassesThatImplementAnInterface is set (#7705) (155aa1f)
  • eslint-plugin: [no-unnecessary-condition] fix false positive with computed member access and branded key type (#7706) (f151b26)
  • eslint-plugin: [switch-exhaustiveness-check] enum members with new line or single quotes are not being fixed correctly (#7806) (a034d0a), closes #7768
  • utils: add missing fields to flat config types (#7933) (533861a)
  • utils: allow string processor in flat config (024ed9e)

Features

  • [member-ordering] add accessor support for member-ordering (#7927) (3c8312d)
  • eslint-plugin: [switch-exhaustiveness-check] add requireDefaultForNonUnion option (#7880) (4cfcd45)
  • update TypeScript to 5.3-rc (#7923) (9034d17)

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @​typescript-eslint/eslint-plugin's changelog.

6.12.0 (2023-11-20)

Bug Fixes

  • eslint-plugin: [class-methods-use-this] detect a problematic case for private/protected members if ignoreClassesThatImplementAnInterface is set (#7705) (155aa1f)
  • eslint-plugin: [no-unnecessary-condition] fix false positive with computed member access and branded key type (#7706) (f151b26)
  • eslint-plugin: [switch-exhaustiveness-check] enum members with new line or single quotes are not being fixed correctly (#7806) (a034d0a), closes #7768

Features

  • [member-ordering] add accessor support for member-ordering (#7927) (3c8312d)
  • eslint-plugin: [switch-exhaustiveness-check] add requireDefaultForNonUnion option (#7880) (4cfcd45)

You can read about our versioning strategy and releases on our website.

Commits
  • 9093e95 chore: publish v6.12.0
  • 2e535b8 chore(deps): update dependency prettier to v3.1.0 (#7938)
  • 4cfcd45 feat(eslint-plugin): [switch-exhaustiveness-check] add requireDefaultForNonUn...
  • 155aa1f fix(eslint-plugin): [class-methods-use-this] detect a problematic case for pr...
  • f151b26 fix(eslint-plugin): [no-unnecessary-condition] fix false positive with comput...
  • a034d0a fix(eslint-plugin): [switch-exhaustiveness-check] enum members with new line ...
  • 3c8312d feat: [member-ordering] add accessor support for member-ordering (#7927)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/eslint-plugin&package-manager=npm_and_yarn&previous-version=6.11.0&new-version=6.12.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 222 +++++++++++++++++++++++++++++++---- playground/package.json | 2 +- 2 files changed, 201 insertions(+), 23 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index ab1c686bf6..e1facbebbe 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,7 +18,7 @@ "@types/react": "^18.2.37", "@types/react-dom": "^18.2.15", "@types/swagger-ui-react": "^4.18.1", - "@typescript-eslint/eslint-plugin": "^6.11.0", + "@typescript-eslint/eslint-plugin": "^6.12.0", "@typescript-eslint/parser": "^6.11.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.54.0", @@ -2105,9 +2105,9 @@ "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==" }, "node_modules/@types/semver": { - "version": "7.5.5", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.5.tgz", - "integrity": "sha512-+d+WYC1BxJ6yVOgUgzK8gWvp5qF8ssV5r4nsDcZWKRWcDQLQ619tvWAxJQYGgBrO1MnLJC7a5GtiYsAoQ47dJg==", + "version": "7.5.6", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.6.tgz", + "integrity": "sha512-dn1l8LaMea/IjDoHNd9J52uBbInB796CDffS6VdIxvqYCPSG0V0DzHp76GpaWnlhg88uYyPbXCDIowa86ybd5A==", "dev": true }, "node_modules/@types/swagger-ui-react": { @@ -2138,16 +2138,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.11.0.tgz", - "integrity": "sha512-uXnpZDc4VRjY4iuypDBKzW1rz9T5YBBK0snMn8MaTSNd2kMlj50LnLBABELjJiOL5YHk7ZD8hbSpI9ubzqYI0w==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.12.0.tgz", + "integrity": "sha512-XOpZ3IyJUIV1b15M7HVOpgQxPPF7lGXgsfcEIu3yDxFPaf/xZKt7s9QO/pbk7vpWQyVulpJbu4E5LwpZiQo4kA==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.11.0", - "@typescript-eslint/type-utils": "6.11.0", - "@typescript-eslint/utils": "6.11.0", - "@typescript-eslint/visitor-keys": "6.11.0", + "@typescript-eslint/scope-manager": "6.12.0", + "@typescript-eslint/type-utils": "6.12.0", + "@typescript-eslint/utils": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2172,6 +2172,53 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", + "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", + "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + 
"dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", + "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/parser": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.11.0.tgz", @@ -2218,13 +2265,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.11.0.tgz", - "integrity": "sha512-nA4IOXwZtqBjIoYrJcYxLRO+F9ri+leVGoJcMW1uqr4r1Hq7vW5cyWrA43lFbpRvQ9XgNrnfLpIkO3i1emDBIA==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.12.0.tgz", + "integrity": "sha512-WWmRXxhm1X8Wlquj+MhsAG4dU/Blvf1xDgGaYCzfvStP2NwPQh6KBvCDbiOEvaE0filhranjIlK/2fSTVwtBng==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.11.0", - "@typescript-eslint/utils": "6.11.0", + "@typescript-eslint/typescript-estree": "6.12.0", + "@typescript-eslint/utils": "6.12.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2244,6 +2291,63 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", + "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", + "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", + "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + 
"node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/types": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.11.0.tgz", @@ -2285,17 +2389,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.11.0.tgz", - "integrity": "sha512-p23ibf68fxoZy605dc0dQAEoUsoiNoP3MD9WQGiHLDuTSOuqoTsa4oAy+h3KDkTcxbbfOtUjb9h3Ta0gT4ug2g==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.12.0.tgz", + "integrity": "sha512-LywPm8h3tGEbgfyjYnu3dauZ0U7R60m+miXgKcZS8c7QALO9uWJdvNoP+duKTk2XMWc7/Q3d/QiCuLN9X6SWyQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.11.0", - "@typescript-eslint/types": "6.11.0", - "@typescript-eslint/typescript-estree": "6.11.0", + "@typescript-eslint/scope-manager": "6.12.0", + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/typescript-estree": "6.12.0", "semver": "^7.5.4" }, "engines": { @@ -2309,6 +2413,80 @@ "eslint": "^7.0.0 || ^8.0.0" } }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", + "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", + "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", + "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", + "integrity": 
"sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.12.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/visitor-keys": { "version": "6.11.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.11.0.tgz", diff --git a/playground/package.json b/playground/package.json index ada925ab2b..6a234b2674 100644 --- a/playground/package.json +++ b/playground/package.json @@ -20,7 +20,7 @@ "@types/react": "^18.2.37", "@types/react-dom": "^18.2.15", "@types/swagger-ui-react": "^4.18.1", - "@typescript-eslint/eslint-plugin": "^6.11.0", + "@typescript-eslint/eslint-plugin": "^6.12.0", "@typescript-eslint/parser": "^6.11.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.54.0", From d7e00d185b44b01d69ecea2ff1969c105e36de92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 23 Nov 2023 12:32:42 -0800 Subject: [PATCH 05/60] bot: Bump swagger-ui-react from 5.10.0 to 5.10.3 in /playground (#2067) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [swagger-ui-react](https://github.com/swagger-api/swagger-ui) from 5.10.0 to 5.10.3.
Release notes

Sourced from swagger-ui-react's releases.

Swagger UI v5.10.3 Released!

5.10.3 (2023-11-22)

Bug Fixes

  • auth: allow password managers to pre-fill input fields (#9390) (9a7c4c0)

Swagger UI v5.10.2 Released!

5.10.2 (2023-11-22)

Bug Fixes

Swagger UI v5.10.1 Released!

5.10.1 (2023-11-20)

Bug Fixes

WARNING: this is a failed release. Most of the release fragments failed to be released. Please ignore this release.

Commits
  • c33fe65 chore(release): cut the v5.10.3 release
  • 9a7c4c0 fix(auth): allow password managers to pre-fill input fields (#9390)
  • fc8e00c chore(release): cut the v5.10.2 release
  • d2cdd89 chore(release): manual release bump to v5.10.1 (#9399)
  • 1f81a1f chore(deps-dev): bump @​commitlint/config-conventional (#9398)
  • fb5f1ff chore(deps-dev): bump @​babel/plugin-transform-runtime (#9397)
  • a22d265 chore(deps-dev): bump @​commitlint/cli from 18.4.2 to 18.4.3 (#9396)
  • dfeb727 fix(release): fix previously failed v5.10.1 release (#9395)
  • 8f93354 fix(styling): ensure authorize button stays on the right (#9387)
  • 3238718 chore(deps-dev): bump eslint from 8.53.0 to 8.54.0 (#9393)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=swagger-ui-react&package-manager=npm_and_yarn&previous-version=5.10.0&new-version=5.10.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index e1facbebbe..6b67161990 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,7 +12,7 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.10.0" + "swagger-ui-react": "^5.10.3" }, "devDependencies": { "@types/react": "^18.2.37", @@ -5595,9 +5595,9 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.10.0", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.10.0.tgz", - "integrity": "sha512-2iWvBRtamhTmr6W0Kj0N1DYy5iuYZcQ2HIv3lEscyoWfn+Tomy5aLZ3Z4LrB5QpNsMebmix9AoDgGFZaqcYPWA==", + "version": "5.10.3", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.10.3.tgz", + "integrity": "sha512-AB/ko3xD76wyCFbfb5zihy8Gacg7Lz62umzcmBLC/+VN8twib4ayWNZ48lTRh6Kb9vitvEQCDM/4VS2uTwwy0w==", "dependencies": { "@babel/runtime-corejs3": "^7.23.2", "@braintree/sanitize-url": "=6.0.4", diff --git a/playground/package.json b/playground/package.json index 6a234b2674..e69479faf4 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,7 +14,7 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.10.0" + "swagger-ui-react": "^5.10.3" }, "devDependencies": { "@types/react": "^18.2.37", From 8d8e1111edf1a01c1c929446118f992ac151e501 Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Mon, 27 Nov 2023 21:45:09 +0100 Subject: [PATCH 06/60] test: Add auto-doc generation (#2051) ## Relevant issue(s) Resolves #1974 ## Description This pull request introduces `gen.AutoGenerateDocs` function, a dynamic and versatile tool designed for the automatic generation of documents based on specified collection definitions. This functionality is essential for testing and simulating various data models and scenarios, providing extensive customization and intelligent demand calculation for document generation. 
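Example usage (a minimal sketch based on the README and function signatures added below; the `tests/gen` import path and the concrete schema and demand values here are illustrative assumptions, not part of this change):

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/tests/gen"
)

func main() {
	// Annotations after '#' configure value generation (see tests/gen/README.md).
	sdl := `
type User {
	name: String # len: 10
	age: Int # min: 18, max: 50
}`

	// Demand 100 User documents; options take precedence over in-schema annotations.
	docs, err := gen.AutoGenerateFromSDL(sdl, gen.WithTypeDemand("User", 100))
	if err != nil {
		panic(err)
	}
	fmt.Println("generated", len(docs), "documents")
}
```

As described in the README below, an error is returned when a type's demand cannot be satisfied by the demands of its related types.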
--- client/descriptions.go | 27 +- tests/gen/README.md | 99 ++ tests/gen/defs.go | 23 + tests/gen/errors.go | 45 + tests/gen/gen_auto.go | 242 +++ tests/gen/gen_auto_config.go | 172 +++ tests/gen/gen_auto_configurator.go | 441 ++++++ tests/gen/gen_auto_option.go | 69 + tests/gen/gen_auto_test.go | 1304 +++++++++++++++++ tests/gen/schema_parser.go | 195 +++ tests/gen/schema_parser_test.go | 297 ++++ tests/integration/index/docs.go | 533 ++++--- .../index/query_performance_test.go | 58 +- .../query_with_index_combined_filter_test.go | 34 +- .../query_with_index_only_filter_test.go | 206 ++- .../index/query_with_relation_filter_test.go | 206 ++- tests/integration/index/utils.go | 290 ---- tests/integration/test_case.go | 28 + tests/integration/utils2.go | 98 +- tests/predefined/README.md | 56 + tests/predefined/defs.go | 34 + tests/predefined/errors.go | 21 + tests/predefined/gen_predefined.go | 251 ++++ tests/predefined/gen_predefined_test.go | 508 +++++++ tests/predefined/util_test.go | 89 ++ 25 files changed, 4545 insertions(+), 781 deletions(-) create mode 100644 tests/gen/README.md create mode 100644 tests/gen/defs.go create mode 100644 tests/gen/errors.go create mode 100644 tests/gen/gen_auto.go create mode 100644 tests/gen/gen_auto_config.go create mode 100644 tests/gen/gen_auto_configurator.go create mode 100644 tests/gen/gen_auto_option.go create mode 100644 tests/gen/gen_auto_test.go create mode 100644 tests/gen/schema_parser.go create mode 100644 tests/gen/schema_parser_test.go delete mode 100644 tests/integration/index/utils.go create mode 100644 tests/predefined/README.md create mode 100644 tests/predefined/defs.go create mode 100644 tests/predefined/errors.go create mode 100644 tests/predefined/gen_predefined.go create mode 100644 tests/predefined/gen_predefined_test.go create mode 100644 tests/predefined/util_test.go diff --git a/client/descriptions.go b/client/descriptions.go index efe470114d..f9a262e438 100644 --- a/client/descriptions.go +++ b/client/descriptions.go @@ -162,7 +162,7 @@ const ( FieldKind_FLOAT FieldKind = 6 FieldKind_FLOAT_ARRAY FieldKind = 7 _ FieldKind = 8 // safe to repurpose (was never used) - _ FieldKind = 9 // safe to repurpose (previoulsy old field) + _ FieldKind = 9 // safe to repurpose (previously old field) FieldKind_DATETIME FieldKind = 10 FieldKind_STRING FieldKind = 11 FieldKind_STRING_ARRAY FieldKind = 12 @@ -186,9 +186,9 @@ const ( // their enum values. // // It is currently used to by [db.PatchSchema] to allow string representations of -// [FieldKind] to be provided instead of their raw int values. This useage may expand +// [FieldKind] to be provided instead of their raw int values. This usage may expand // in the future. They currently roughly correspond to the GQL field types, but this -// equality is not guarenteed. +// equality is not guaranteed. var FieldKindStringToEnumMapping = map[string]FieldKind{ "ID": FieldKind_DocKey, "Boolean": FieldKind_BOOL, @@ -238,7 +238,7 @@ type FieldDescription struct { // ID contains the internal ID of this field. // // Whilst this ID will typically match the field's index within the Schema's Fields - // slice, there is no guarentee that they will be the same. + // slice, there is no guarantee that they will be the same. // // It is immutable. ID FieldID @@ -287,6 +287,25 @@ func (f FieldDescription) IsPrimaryRelation() bool { return f.RelationType > 0 && f.RelationType&Relation_Type_Primary != 0 } +// IsRelation returns true if this field is a relation. 
+func (f FieldDescription) IsRelation() bool { + return f.RelationType > 0 +} + +// IsArray returns true if this field is an array type which includes inline arrays as well +// as relation arrays. +func (f FieldDescription) IsArray() bool { + return f.Kind == FieldKind_BOOL_ARRAY || + f.Kind == FieldKind_INT_ARRAY || + f.Kind == FieldKind_FLOAT_ARRAY || + f.Kind == FieldKind_STRING_ARRAY || + f.Kind == FieldKind_FOREIGN_OBJECT_ARRAY || + f.Kind == FieldKind_NILLABLE_BOOL_ARRAY || + f.Kind == FieldKind_NILLABLE_INT_ARRAY || + f.Kind == FieldKind_NILLABLE_FLOAT_ARRAY || + f.Kind == FieldKind_NILLABLE_STRING_ARRAY +} + // IsSet returns true if the target relation type is set. func (m RelationType) IsSet(target RelationType) bool { return m&target > 0 diff --git a/tests/gen/README.md b/tests/gen/README.md new file mode 100644 index 0000000000..8287156522 --- /dev/null +++ b/tests/gen/README.md @@ -0,0 +1,99 @@ +# Automatic Documents Generation + +`AutoGenerate` and `AutoGenerateFromSDL` are highly versatile functions designed for dynamic document generation, perfect for testing and simulation purposes. + +`AutoGenerateFromSDL` creates documents based on a specified GQL SDL, which may contain multiple schema/collection definitions, allowing for extensive customization of data generation. + +The function generates documents adhering to a defined collection and its configuration. +It interprets the types and relationships within the collection to create realistic, interconnected data structures. + +`AutoGenerate` creates documents based on the provided collections' definitions (`[]client.CollectionDefinition`). + +### Demand Calculation: + +The functions calculate the 'demand', that is, the number of documents to generate, based on the configuration provided. +For related types within the collection set, they intelligently adjust the number of generated documents to maintain consistency in relationships (one-to-one, one-to-many, etc.). + +In the absence of explicit demands, they deduce demands from the maximum required by related types, or use a default value if no relation-based demands are present. + +An error is returned if the demand for documents cannot be satisfied. +For example, a document may expect at least 10 secondary documents while the demand for secondary documents is only 5. + +## Configuration + +Both functions `AutoGenerate` and `AutoGenerateFromSDL` can be configured using options. + +Additionally, `AutoGenerateFromSDL` can be configured directly within the schema using annotations. +Options take precedence over in-schema configurations. + +### In-schema Configuration: + +Field values can be configured directly within the SDL doc using annotations after "#" (e.g., `# min: 1, max: 120` for an integer field). + +At the moment, the following value configurations are supported: +- `min` and `max` for integer, float and relation fields. For relation fields, the values define the minimum and maximum number of related documents. +- `len` for string fields. + +Default value ranges are used when not explicitly set in the schema or via options. + +### Customization with Options: + +- `WithTypeDemand` and `WithTypeDemandRange` allow setting the specific number (or range) of documents for a given type. +- `WithFieldRange` and `WithFieldLen` override in-schema configurations for field ranges and lengths. +- `WithFieldGenerator` provides custom value generation logic for specific fields. +- `WithRandomSeed` ensures deterministic output, useful for repeatable tests (see the sketch after this list).
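+
+### Deterministic Output:
+
+A minimal sketch of a repeatable run (this assumes `WithRandomSeed` accepts an integer seed; check the option's signature in `gen_auto_option.go`):
+
+```go
+seed := int64(42)
+// Both runs use the same seed and options, so they produce identical documents.
+docs1, _ := AutoGenerateFromSDL(sdl, WithTypeDemand("User", 10), WithRandomSeed(seed))
+docs2, _ := AutoGenerateFromSDL(sdl, WithTypeDemand("User", 10), WithRandomSeed(seed))
+```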
+ +## Examples + +### Basic Document Generation: + +```go +sdl := ` +type User { + name: String # len: 10 + age: Int # min: 18, max: 50 + verified: Boolean + rating: Float # min: 0.0, max: 5.0 +}` +docs, _ := AutoGenerateFromSDL(sdl, WithTypeDemand("User", 100)) +``` + +### Custom Field Range: + +Overrides the age range specified in the SDL doc. + +```go +docs, _ := AutoGenerateFromSDL(sdl, WithTypeDemand("User", 50), WithFieldRange("User", "age", 25, 30)) +``` + +### One-to-Many Relationship: + +Generates User documents each related to multiple Device documents. + +```go +sdl := ` +type User { + name: String + devices: [Device] # min: 1, max: 3 +} +type Device { + model: String + owner: User +}` +docs, _ := AutoGenerateFromSDL(sdl, WithTypeDemand("User", 10)) +``` + +### Custom Value Generation: + +Custom generation for the name field. + +```go +nameWithPrefix := func(i int, next func() any) any { + return "user_" + next().(string) +} +docs, _ := AutoGenerateFromSDL(sdl, WithTypeDemand("User", 10), WithFieldGenerator("User", "name", nameWithPrefix)) +``` + +## Conclusion + +`AutoGenerateFromSDL` is a powerful tool for generating structured, relational data on the fly. Its flexibility in configuration and intelligent demand calculation make it ideal for testing complex data models and scenarios. diff --git a/tests/gen/defs.go b/tests/gen/defs.go new file mode 100644 index 0000000000..0c575e7001 --- /dev/null +++ b/tests/gen/defs.go @@ -0,0 +1,23 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package gen + +import ( + "github.com/sourcenetwork/defradb/client" +) + +// GeneratedDoc is a document generated by a document generator. +type GeneratedDoc struct { + // Col is the collection definition that the document belongs to. + Col *client.CollectionDefinition + // Doc is the document generated. + Doc *client.Document +} diff --git a/tests/gen/errors.go b/tests/gen/errors.go new file mode 100644 index 0000000000..aa052b71fd --- /dev/null +++ b/tests/gen/errors.go @@ -0,0 +1,45 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt.
+ +package gen + +import "github.com/sourcenetwork/defradb/errors" + +const ( + errInvalidConfiguration string = "invalid configuration" + errCanNotSupplyTypeDemand string = "can not supply demand for type " + errFailedToParse string = "failed to parse schema" + errFailedToGenerateDoc string = "failed to generate doc" + errIncompleteColDefinition string = "incomplete collection definition" +) + +func NewErrInvalidConfiguration(reason string) error { + return errors.New(errInvalidConfiguration, errors.NewKV("Reason", reason)) +} + +func NewErrCanNotSupplyTypeDemand(typeName string) error { + return errors.New(errCanNotSupplyTypeDemand, errors.NewKV("Type", typeName)) +} + +func NewErrFailedToParse(reason string) error { + return errors.New(errFailedToParse, errors.NewKV("Reason", reason)) +} + +func NewErrFailedToGenerateDoc(inner error) error { + return errors.Wrap(errFailedToGenerateDoc, inner) +} + +func NewErrIncompleteColDefinition(reason string) error { + return errors.New(errIncompleteColDefinition, errors.NewKV("Reason", reason)) +} + +func newNotDefinedTypeErr(typeName string) error { + return NewErrInvalidConfiguration("type " + typeName + " is not defined in the schema") +} diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go new file mode 100644 index 0000000000..52ea3148e5 --- /dev/null +++ b/tests/gen/gen_auto.go @@ -0,0 +1,242 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package gen + +import ( + "math/rand" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" +) + +const ( + // DefaultNumDocs is the default number of documents to generate for a collection. + DefaultNumDocs = 10 + // DefaultNumChildrenPerDoc is the default number of children to generate for a document. + DefaultNumChildrenPerDoc = 2 + + // DefaultStrLen is the default length of a string to generate. + DefaultStrLen = 10 + // DefaultIntMin is the default minimum value of an integer to generate. + DefaultIntMin = 0 + // DefaultIntMax is the default maximum value of an integer to generate. + DefaultIntMax = 10000 +) + +// AutoGenerateFromSDL generates random documents from a GraphQL SDL. +func AutoGenerateFromSDL(gqlSDL string, options ...Option) ([]GeneratedDoc, error) { + genConfigs, err := parseConfig(gqlSDL) + if err != nil { + return nil, err + } + typeDefs, err := parseSDL(gqlSDL) + if err != nil { + return nil, err + } + generator := newRandomDocGenerator(typeDefs, genConfigs) + return generator.generateDocs(options...) +} + +// AutoGenerate generates random documents from collection definitions. +func AutoGenerate(definitions []client.CollectionDefinition, options ...Option) ([]GeneratedDoc, error) { + err := validateDefinitions(definitions) + if err != nil { + return nil, err + } + typeDefs := make(map[string]client.CollectionDefinition) + for _, def := range definitions { + typeDefs[def.Description.Name] = def + } + generator := newRandomDocGenerator(typeDefs, nil) + return generator.generateDocs(options...) 
+} + +func newRandomDocGenerator(types map[string]client.CollectionDefinition, config configsMap) *randomDocGenerator { + if config == nil { + config = make(configsMap) + } + configurator := newDocGenConfigurator(types, config) + return &randomDocGenerator{ + configurator: configurator, + generatedDocs: make(map[string][]genDoc), + } +} + +type genDoc struct { + // the dockey of the document. It is cached from doc.Key().String() to avoid + // calculating it multiple times. + docKey string + doc *client.Document +} + +type randomDocGenerator struct { + configurator docsGenConfigurator + + generatedDocs map[string][]genDoc + random rand.Rand +} + +func (g *randomDocGenerator) generateDocs(options ...Option) ([]GeneratedDoc, error) { + err := g.configurator.Configure(options...) + if err != nil { + return nil, err + } + + g.random = *g.configurator.random + + resultDocs := make([]GeneratedDoc, 0, g.getMaxTotalDemand()) + err = g.generateRandomDocs(g.configurator.typesOrder) + if err != nil { + return nil, err + } + for _, colName := range g.configurator.typesOrder { + typeDef := g.configurator.types[colName] + for _, doc := range g.generatedDocs[colName] { + resultDocs = append(resultDocs, GeneratedDoc{ + Col: &typeDef, + Doc: doc.doc, + }) + } + } + return resultDocs, nil +} + +func (g *randomDocGenerator) getMaxTotalDemand() int { + totalDemand := 0 + for _, demand := range g.configurator.docsDemand { + totalDemand += demand.max + } + return totalDemand +} + +// getNextPrimaryDocKey returns the key of the next primary document to be used as a relation. +func (g *randomDocGenerator) getNextPrimaryDocKey(secondaryType string, field *client.FieldDescription) string { + ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) + return g.generatedDocs[field.Schema][ind].docKey +} + +func (g *randomDocGenerator) generateRandomDocs(order []string) error { + for _, typeName := range order { + typeDef := g.configurator.types[typeName] + + currentTypeDemand := g.configurator.docsDemand[typeName] + // we need to decide how many documents to generate in total for this type, + // and if it's a range (say, 10-30) we take the average (20).
+ totalDemand := currentTypeDemand.getAverage() + for i := 0; i < totalDemand; i++ { + newDoc := make(map[string]any) + for _, field := range typeDef.Schema.Fields { + if field.Name == request.KeyFieldName { + continue + } + if field.IsRelation() { + if field.IsPrimaryRelation() { + newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocKey(typeName, &field) + } + } else { + fieldConf := g.configurator.config.ForField(typeName, field.Name) + newDoc[field.Name] = g.generateRandomValue(typeName, field.Kind, fieldConf) + } + } + doc, err := client.NewDocFromMap(newDoc) + if err != nil { + return err + } + g.generatedDocs[typeName] = append(g.generatedDocs[typeName], + genDoc{docKey: doc.Key().String(), doc: doc}) + } + } + return nil +} + +func getRandomString(random *rand.Rand, n int) string { + const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + b := make([]byte, n) + for i := range b { + b[i] = letterBytes[random.Intn(len(letterBytes))] + } + return string(b) +} + +func (g *randomDocGenerator) generateRandomValue( + typeName string, + fieldKind client.FieldKind, + fieldConfig genConfig, +) any { + genVal := g.getValueGenerator(fieldKind, fieldConfig) + if fieldConfig.fieldGenerator != nil { + return fieldConfig.fieldGenerator(len(g.generatedDocs[typeName]), genVal) + } + return genVal() +} + +func (g *randomDocGenerator) getValueGenerator(fieldKind client.FieldKind, fieldConfig genConfig) func() any { + switch fieldKind { + case client.FieldKind_STRING: + strLen := DefaultStrLen + if prop, ok := fieldConfig.props["len"]; ok { + strLen = prop.(int) + } + return func() any { return getRandomString(&g.random, strLen) } + case client.FieldKind_INT: + min, max := getMinMaxOrDefault(fieldConfig, DefaultIntMin, DefaultIntMax) + return func() any { return min + g.random.Intn(max-min+1) } + case client.FieldKind_BOOL: + ratio := 0.5 + if prop, ok := fieldConfig.props["ratio"]; ok { + ratio = prop.(float64) + } + return func() any { return g.random.Float64() < ratio } + case client.FieldKind_FLOAT: + min, max := getMinMaxOrDefault(fieldConfig, 0.0, 1.0) + return func() any { return min + g.random.Float64()*(max-min) } + } + panic("Can not generate random value for unknown type: " + fieldKind.String()) +} + +func validateDefinitions(definitions []client.CollectionDefinition) error { + colIDs := make(map[uint32]struct{}) + colNames := make(map[string]struct{}) + fieldRefs := []string{} + for _, def := range definitions { + if def.Description.Name == "" { + return NewErrIncompleteColDefinition("description name is empty") + } + if def.Schema.Name == "" { + return NewErrIncompleteColDefinition("schema name is empty") + } + if def.Description.Name != def.Schema.Name { + return NewErrIncompleteColDefinition("description name and schema name do not match") + } + for _, field := range def.Schema.Fields { + if field.Name == "" { + return NewErrIncompleteColDefinition("field name is empty") + } + if field.IsObject() { + if field.Schema == "" { + return NewErrIncompleteColDefinition("field schema is empty") + } + fieldRefs = append(fieldRefs, field.Schema) + } + } + colNames[def.Description.Name] = struct{}{} + colIDs[def.Description.ID] = struct{}{} + } + for _, ref := range fieldRefs { + if _, ok := colNames[ref]; !ok { + return NewErrIncompleteColDefinition("field schema references unknown collection") + } + } + if len(colIDs) != len(definitions) { + return NewErrIncompleteColDefinition("duplicate collection IDs") + } + return nil +} diff --git 
a/tests/gen/gen_auto_config.go b/tests/gen/gen_auto_config.go
new file mode 100644
index 0000000000..ccebce92d1
--- /dev/null
+++ b/tests/gen/gen_auto_config.go
@@ -0,0 +1,172 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package gen
+
+import (
+	"reflect"
+
+	"github.com/sourcenetwork/defradb/client"
+)
+
+// genConfig is the configuration for generating values of a single field.
+type genConfig struct {
+	labels         []string
+	props          map[string]any
+	fieldGenerator GenerateFieldFunc
+}
+
+// configsMap is a map of type name to a map of field name to a generation configuration.
+type configsMap map[string]map[string]genConfig
+
+// ForField returns the generation configuration for a specific field of a type.
+func (m configsMap) ForField(typeStr, fieldName string) genConfig {
+	typeConfig, ok := m[typeStr]
+	if !ok {
+		typeConfig = make(map[string]genConfig)
+		m[typeStr] = typeConfig
+	}
+	fieldConfig, ok := typeConfig[fieldName]
+	if !ok {
+		fieldConfig.props = make(map[string]any)
+	}
+	return fieldConfig
+}
+
+// AddForField adds a generation configuration for a specific field of a type.
+func (m configsMap) AddForField(typeStr, fieldName string, conf genConfig) {
+	typeConfig, ok := m[typeStr]
+	if !ok {
+		typeConfig = make(map[string]genConfig)
+		m[typeStr] = typeConfig
+	}
+	typeConfig[fieldName] = conf
+	m[typeStr] = typeConfig
+}
+
+func validateConfig(types map[string]client.CollectionDefinition, configsMap configsMap) error {
+	for typeName, typeConfigs := range configsMap {
+		typeDef := types[typeName]
+		if typeDef.Description.Name == "" {
+			return newNotDefinedTypeErr(typeName)
+		}
+		for fieldName, fieldConfig := range typeConfigs {
+			fieldDef, hasField := typeDef.Description.GetFieldByName(fieldName, &typeDef.Schema)
+			if !hasField {
+				return NewErrInvalidConfiguration("field " + fieldName +
+					" is not defined in the schema for type " + typeName)
+			}
+			err := checkAndValidateMinMax(&fieldDef, &fieldConfig)
+			if err != nil {
+				return err
+			}
+
+			err = checkAndValidateLen(&fieldDef, &fieldConfig)
+			if err != nil {
+				return err
+			}
+
+			err = checkAndValidateRatio(&fieldDef, &fieldConfig)
+			if err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+func checkAndValidateMinMax(field *client.FieldDescription, conf *genConfig) error {
+	_, hasMin := conf.props["min"]
+	if hasMin {
+		var err error
+		if field.IsArray() || field.Kind == client.FieldKind_INT {
+			err = validateMinConfig[int](conf, field.IsArray())
+		} else {
+			err = validateMinConfig[float64](conf, false)
+		}
+		if err != nil {
+			return err
+		}
+	} else if _, hasMax := conf.props["max"]; hasMax {
+		return NewErrInvalidConfiguration("max value is set, but min value is not set")
+	}
+	return nil
+}
+
+func checkAndValidateLen(field *client.FieldDescription, conf *genConfig) error {
+	lenConf, hasLen := conf.props["len"]
+	if hasLen {
+		if field.Kind != client.FieldKind_STRING {
+			return NewErrInvalidConfiguration("len is used on a non-String field")
+		}
+		strLen, ok := lenConf.(int)
+		if !ok {
+			return NewErrInvalidConfiguration("len value is not integer")
+		}
+		if strLen < 1 {
+			return NewErrInvalidConfiguration("len value is less than 1")
+		}
+	}
+	return nil
+}
+
+func checkAndValidateRatio(field 
*client.FieldDescription, conf *genConfig) error {
+	ratioConf, hasRatio := conf.props["ratio"]
+	if hasRatio {
+		if field.Kind != client.FieldKind_BOOL {
+			return NewErrInvalidConfiguration("ratio is used on a non-Boolean field")
+		}
+		ratio, ok := ratioConf.(float64)
+		if !ok {
+			return NewErrInvalidConfiguration("ratio value is not float")
+		}
+		if ratio < 0 {
+			return NewErrInvalidConfiguration("ratio value is negative")
+		}
+		if ratio > 1 {
+			return NewErrInvalidConfiguration("ratio value is greater than 1.0")
+		}
+	}
+	return nil
+}
+
+func validateMinConfig[T int | float64](fieldConf *genConfig, onlyPositive bool) error {
+	min, ok := fieldConf.props["min"].(T)
+	if !ok {
+		var t T
+		return NewErrInvalidConfiguration("min value is not " + reflect.TypeOf(t).Name())
+	}
+	if min < 0 && onlyPositive {
+		return NewErrInvalidConfiguration("min value on array is less than 0")
+	}
+	if maxProp, hasMax := fieldConf.props["max"]; hasMax {
+		max, ok := maxProp.(T)
+		if !ok {
+			var t T
+			return NewErrInvalidConfiguration("max value is not " + reflect.TypeOf(t).Name())
+		}
+		if min > max {
+			return NewErrInvalidConfiguration("min value is greater than max value")
+		}
+	} else {
+		return NewErrInvalidConfiguration("min value is set, but max value is not set")
+	}
+	return nil
+}
+
+func getMinMaxOrDefault[T int | float64](conf genConfig, min, max T) (T, T) {
+	if prop, ok := conf.props["min"]; ok {
+		min = prop.(T)
+	}
+	if prop, ok := conf.props["max"]; ok {
+		max = prop.(T)
+	}
+	return min, max
+}
diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go
new file mode 100644
index 0000000000..30f0f70efe
--- /dev/null
+++ b/tests/gen/gen_auto_configurator.go
@@ -0,0 +1,441 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package gen
+
+import (
+	"math"
+	"math/rand"
+	"time"
+
+	"github.com/sourcenetwork/defradb/client"
+)
+
+type typeDemand struct {
+	min, max    int
+	usedDefined bool
+}
+
+func (d typeDemand) getAverage() int {
+	if d.max == math.MaxInt {
+		return d.max
+	}
+	return (d.min + d.max) / 2
+}
+
+// docsGenConfigurator is responsible for handling the provided configuration and
+// configuring the document generator. This includes things like setting up the
+// demand for each type, setting up the relation usage counters, and setting up
+// the random seed.
+type docsGenConfigurator struct {
+	types        map[string]client.CollectionDefinition
+	config       configsMap
+	primaryGraph map[string][]string
+	typesOrder   []string
+	docsDemand   map[string]typeDemand
+	usageCounter typeUsageCounters
+	random       *rand.Rand
+}
+
+// typeUsageCounters is a map of primary type to secondary type to field name to
+// relation usage. This is used to keep track of the usage of each relation.
+// Each foreign field has a tracker that records which primary documents have been
+// used for that field, and how many times. This is used to ensure that the
+// number of documents generated for each primary type is within the range of the
+// demand for that type and to guarantee a uniform distribution of the documents.
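+//
+// For example (hypothetical type names): for a secondary type Device whose
+// "owner" field points at the primary type User, the tracker for that relation
+// would live at m["User"]["Device"]["owner"].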
+type typeUsageCounters struct { + m map[string]map[string]map[string]*relationUsage + random *rand.Rand +} + +func newTypeUsageCounter(random *rand.Rand) typeUsageCounters { + return typeUsageCounters{ + m: make(map[string]map[string]map[string]*relationUsage), + random: random, + } +} + +// addRelationUsage adds a relation usage tracker for a foreign field. +func (c *typeUsageCounters) addRelationUsage( + secondaryType string, + field client.FieldDescription, + min, max, numDocs int, +) { + primaryType := field.Schema + if _, ok := c.m[primaryType]; !ok { + c.m[primaryType] = make(map[string]map[string]*relationUsage) + } + if _, ok := c.m[primaryType][secondaryType]; !ok { + c.m[primaryType][secondaryType] = make(map[string]*relationUsage) + } + if _, ok := c.m[primaryType][secondaryType][field.Name]; !ok { + c.m[primaryType][secondaryType][field.Name] = newRelationUsage(min, max, numDocs, c.random) + } +} + +// getNextTypeIndForField returns the next index to be used for a foreign field. +func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDescription) int { + current := c.m[field.Schema][secondaryType][field.Name] + return current.useNextDocKey() +} + +type relationUsage struct { + // counter is the number of primary documents that have been used for the relation. + counter int + // minAmount is the minimum number of primary documents that should be used for the relation. + minAmount int + // maxAmount is the maximum number of primary documents that should be used for the relation. + maxAmount int + // docKeysCounter is a slice of structs that keep track of the number of times + // each primary document has been used for the relation. + docKeysCounter []struct { + // ind is the index of the primary document. + ind int + // count is the number of times the primary document has been used for the relation. + count int + } + // numAvailableDocs is the number of documents of the primary type that are available + // for the relation. + numAvailableDocs int + random *rand.Rand +} + +func newRelationUsage(minAmount, maxAmount, numDocs int, random *rand.Rand) *relationUsage { + return &relationUsage{ + minAmount: minAmount, + maxAmount: maxAmount, + numAvailableDocs: numDocs, + random: random, + } +} + +// useNextDocKey determines the next primary document to be used for the relation, tracks +// it and returns its index. +func (u *relationUsage) useNextDocKey() int { + docKeyCounterInd := 0 + // if a primary document has a minimum number of secondary documents that should be + // generated for it, then it should be used until that minimum is reached. + // After that, we can pick a random primary document to use. + if u.counter >= u.minAmount*u.numAvailableDocs { + docKeyCounterInd = u.random.Intn(len(u.docKeysCounter)) + } else { + docKeyCounterInd = u.counter % len(u.docKeysCounter) + } + currentInd := u.docKeysCounter[docKeyCounterInd].ind + docCounter := &u.docKeysCounter[docKeyCounterInd] + docCounter.count++ + // if the primary document reached max number of secondary documents, we can remove it + // from the slice of primary documents that are available for the relation. + if docCounter.count >= u.maxAmount { + lastCounterInd := len(u.docKeysCounter) - 1 + *docCounter = u.docKeysCounter[lastCounterInd] + u.docKeysCounter = u.docKeysCounter[:lastCounterInd] + } + u.counter++ + + return currentInd +} + +// allocateIndexes allocates the indexes for the relation usage tracker. 
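+// For example (illustrative values), with numAvailableDocs == 3 the tracker is
+// initialized to [{ind: 0, count: 0}, {ind: 1, count: 0}, {ind: 2, count: 0}]:
+// one entry per available primary document, each with a zero usage count.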
+func (u *relationUsage) allocateIndexes() { + docKeysCounter := make([]struct { + ind int + count int + }, u.numAvailableDocs) + for i := range docKeysCounter { + docKeysCounter[i].ind = i + } + u.docKeysCounter = docKeysCounter +} + +func newDocGenConfigurator(types map[string]client.CollectionDefinition, config configsMap) docsGenConfigurator { + return docsGenConfigurator{ + types: types, + config: config, + docsDemand: make(map[string]typeDemand), + } +} + +func (g *docsGenConfigurator) Configure(options ...Option) error { + for _, option := range options { + option(g) + } + + for typeName := range g.docsDemand { + if _, ok := g.types[typeName]; !ok { + return newNotDefinedTypeErr(typeName) + } + } + + err := validateConfig(g.types, g.config) + if err != nil { + return err + } + + if g.random == nil { + g.random = rand.New(rand.NewSource(time.Now().UnixNano())) + } + + g.usageCounter = newTypeUsageCounter(g.random) + + g.primaryGraph = getRelationGraph(g.types) + g.typesOrder = getTopologicalOrder(g.primaryGraph, g.types) + + if len(g.docsDemand) == 0 { + g.docsDemand[g.typesOrder[0]] = typeDemand{min: DefaultNumDocs, max: DefaultNumDocs} + } + + initialTypes := make(map[string]typeDemand) + for typeName, typeDemand := range g.docsDemand { + initialTypes[typeName] = typeDemand + } + + err = g.calculateDocsDemand(initialTypes) + if err != nil { + return err + } + + g.allocateUsageCounterIndexes() + return nil +} + +func (g *docsGenConfigurator) calculateDocsDemand(initialTypes map[string]typeDemand) error { + for typeName, demand := range initialTypes { + var err error + // from the current type we go up the graph and calculate the demand for primary types + demand, err = g.getPrimaryDemand(typeName, demand, g.primaryGraph) + if err != nil { + return err + } + g.docsDemand[typeName] = demand + + err = g.calculateDemandForSecondaryTypes(typeName, g.primaryGraph) + if err != nil { + return err + } + } + + // for other types that are not in the same graph as the initial types, we start with primary + // types, give them default demand value and calculate the demand for secondary types. + for _, typeName := range g.typesOrder { + if _, ok := g.docsDemand[typeName]; !ok { + g.docsDemand[typeName] = typeDemand{min: DefaultNumDocs, max: DefaultNumDocs} + err := g.calculateDemandForSecondaryTypes(typeName, g.primaryGraph) + if err != nil { + return err + } + } + } + return nil +} + +// allocateUsageCounterIndexes allocates the indexes for each relation usage tracker. 
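+// Demands left unbounded during calculation (max == math.MaxInt) are first
+// clamped to the largest finite demand so that the index slices stay finite.
+// For example (illustrative values), an unbounded demand alongside a finite
+// demand of {min: 20, max: 20} is clamped to exactly 20 documents.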
+func (g *docsGenConfigurator) allocateUsageCounterIndexes() {
+	max := 0
+	for _, demand := range g.docsDemand {
+		if demand.max > max && demand.max != math.MaxInt {
+			max = demand.max
+		}
+	}
+	for typeName, demand := range g.docsDemand {
+		if demand.max == math.MaxInt {
+			demand.max = max
+			demand.min = max
+			g.docsDemand[typeName] = demand
+		}
+		for _, usage := range g.usageCounter.m[typeName] {
+			for _, field := range usage {
+				if field.numAvailableDocs == math.MaxInt {
+					field.numAvailableDocs = max
+				}
+				if field.numAvailableDocs > demand.max {
+					field.numAvailableDocs = demand.max
+				}
+				field.allocateIndexes()
+			}
+		}
+	}
+}
+
+func (g *docsGenConfigurator) getDemandForPrimaryType(
+	primaryType, secondaryType string,
+	secondaryDemand typeDemand,
+	primaryGraph map[string][]string,
+) (typeDemand, error) {
+	primaryTypeDef := g.types[primaryType]
+	for _, field := range primaryTypeDef.Schema.Fields {
+		if field.IsObject() && field.Schema == secondaryType {
+			primaryDemand := typeDemand{min: secondaryDemand.min, max: secondaryDemand.max}
+			minPerDoc, maxPerDoc := 1, 1
+			if field.IsArray() {
+				fieldConf := g.config.ForField(primaryType, field.Name)
+				minPerDoc, maxPerDoc = getMinMaxOrDefault(fieldConf, 0, secondaryDemand.max)
+				// if we request min 100 of secondary docs and there can be max 5 per primary doc,
+				// then we need to generate at least 20 primary docs.
+				minRatio := float64(secondaryDemand.min) / float64(maxPerDoc)
+				primaryDemand.min = int(math.Ceil(minRatio))
+				if minPerDoc == 0 {
+					primaryDemand.max = math.MaxInt
+				} else {
+					// if we request max 200 of secondary docs and there can be min 10 per primary doc,
+					// then we need to generate at most 20 primary docs.
+					maxRatio := float64(secondaryDemand.max) / float64(minPerDoc)
+					primaryDemand.max = int(math.Floor(maxRatio))
+				}
+
+				var err error
+				primaryDemand, err = g.getPrimaryDemand(primaryType, primaryDemand, primaryGraph)
+				if err != nil {
+					return typeDemand{}, err
+				}
+			}
+			if currentDemand, ok := g.docsDemand[primaryType]; ok {
+				if primaryDemand.min < currentDemand.min {
+					primaryDemand.min = currentDemand.min
+				}
+				if primaryDemand.max > currentDemand.max {
+					primaryDemand.max = currentDemand.max
+				}
+			}
+
+			if primaryDemand.min > primaryDemand.max {
+				return typeDemand{}, NewErrCanNotSupplyTypeDemand(primaryType)
+			}
+			g.docsDemand[primaryType] = primaryDemand
+			g.initRelationUsages(field.Schema, primaryType, minPerDoc, maxPerDoc)
+		}
+	}
+	return secondaryDemand, nil
+}
+
+func (g *docsGenConfigurator) getPrimaryDemand(
+	secondaryType string,
+	secondaryDemand typeDemand,
+	primaryGraph map[string][]string,
+) (typeDemand, error) {
+	for _, primaryTypeName := range primaryGraph[secondaryType] {
+		var err error
+		secondaryDemand, err = g.getDemandForPrimaryType(primaryTypeName, secondaryType, secondaryDemand, primaryGraph)
+		if err != nil {
+			return typeDemand{}, err
+		}
+	}
+	return secondaryDemand, nil
+}
+
+func (g *docsGenConfigurator) calculateDemandForSecondaryTypes(
+	typeName string,
+	primaryGraph map[string][]string,
+) error {
+	typeDef := g.types[typeName]
+	for _, field := range typeDef.Schema.Fields {
+		if field.IsObject() && !field.IsPrimaryRelation() {
+			primaryDocDemand := g.docsDemand[typeName]
+			newSecDemand := typeDemand{min: primaryDocDemand.min, max: primaryDocDemand.max}
+			min, max := 1, 1
+
+			if field.IsArray() {
+				fieldConf := g.config.ForField(typeName, field.Name)
+				min, max = getMinMaxOrDefault(fieldConf, DefaultNumChildrenPerDoc, DefaultNumChildrenPerDoc)
+				newSecDemand.max = 
primaryDocDemand.min * max + newSecDemand.min = primaryDocDemand.max * min + } + + curSecDemand := g.docsDemand[field.Schema] + if curSecDemand.usedDefined && + (curSecDemand.min < newSecDemand.min || curSecDemand.max > newSecDemand.max) { + return NewErrCanNotSupplyTypeDemand(field.Schema) + } + g.docsDemand[field.Schema] = newSecDemand + g.initRelationUsages(field.Schema, typeName, min, max) + + err := g.calculateDemandForSecondaryTypes(field.Schema, primaryGraph) + if err != nil { + return err + } + + for _, primaryTypeName := range primaryGraph[field.Schema] { + if _, ok := g.docsDemand[primaryTypeName]; !ok { + primaryDemand, err := g.getDemandForPrimaryType(primaryTypeName, field.Schema, newSecDemand, primaryGraph) + if err != nil { + return err + } + g.docsDemand[primaryTypeName] = primaryDemand + } + } + } + } + return nil +} + +func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, min, max int) { + secondaryTypeDef := g.types[secondaryType] + for _, secondaryTypeField := range secondaryTypeDef.Schema.Fields { + if secondaryTypeField.Schema == primaryType { + g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, min, max, g.docsDemand[primaryType].getAverage()) + } + } +} + +func getRelationGraph(types map[string]client.CollectionDefinition) map[string][]string { + primaryGraph := make(map[string][]string) + + appendUnique := func(slice []string, val string) []string { + for _, item := range slice { + if item == val { + return slice + } + } + return append(slice, val) + } + + for typeName, typeDef := range types { + for _, field := range typeDef.Schema.Fields { + if field.IsObject() { + if field.IsPrimaryRelation() { + primaryGraph[typeName] = appendUnique(primaryGraph[typeName], field.Schema) + } else { + primaryGraph[field.Schema] = appendUnique(primaryGraph[field.Schema], typeName) + } + } + } + } + + return primaryGraph +} + +func getTopologicalOrder(graph map[string][]string, types map[string]client.CollectionDefinition) []string { + visited := make(map[string]bool) + stack := []string{} + + var dfs func(node string) + dfs = func(node string) { + if visited[node] { + return + } + visited[node] = true + + for _, neighbor := range graph[node] { + if !visited[neighbor] { + dfs(neighbor) + } + } + + stack = append(stack, node) + } + + for typeName := range types { + if !visited[typeName] { + dfs(typeName) + } + } + + return stack +} diff --git a/tests/gen/gen_auto_option.go b/tests/gen/gen_auto_option.go new file mode 100644 index 0000000000..3ed80c531e --- /dev/null +++ b/tests/gen/gen_auto_option.go @@ -0,0 +1,69 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package gen + +import "math/rand" + +// Option is a function that configures a document generator. +type Option func(*docsGenConfigurator) + +// WithTypeDemand configures the demand for a type. +func WithTypeDemand(typeName string, demand int) Option { + return func(g *docsGenConfigurator) { + g.docsDemand[typeName] = typeDemand{min: demand, max: demand, usedDefined: true} + } +} + +// WithTypeDemandRange configures the demand range for a type. 
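+// For example (illustrative values), WithTypeDemandRange("User", 10, 30) asks
+// the generator to produce between 10 and 30 "User" documents; the exact count
+// is resolved during demand calculation.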
+func WithTypeDemandRange(typeName string, min, max int) Option {
+	return func(g *docsGenConfigurator) {
+		g.docsDemand[typeName] = typeDemand{min: min, max: max, usedDefined: true}
+	}
+}
+
+// WithFieldRange configures the value range for a field.
+func WithFieldRange[T int | float64](typeName, fieldName string, min, max T) Option {
+	return func(g *docsGenConfigurator) {
+		conf := g.config.ForField(typeName, fieldName)
+		conf.props["min"] = min
+		conf.props["max"] = max
+		g.config.AddForField(typeName, fieldName, conf)
+	}
+}
+
+// WithFieldLen configures the length of a string field.
+func WithFieldLen(typeName, fieldName string, length int) Option {
+	return func(g *docsGenConfigurator) {
+		conf := g.config.ForField(typeName, fieldName)
+		conf.props["len"] = length
+		g.config.AddForField(typeName, fieldName, conf)
+	}
+}
+
+// WithFieldGenerator configures a custom field value generator.
+func WithFieldGenerator(typeName, fieldName string, genFunc GenerateFieldFunc) Option {
+	return func(g *docsGenConfigurator) {
+		g.config.AddForField(typeName, fieldName, genConfig{fieldGenerator: genFunc})
+	}
+}
+
+// WithRandomSeed configures the random seed for the document generator.
+func WithRandomSeed(seed int64) Option {
+	return func(g *docsGenConfigurator) {
+		g.random = rand.New(rand.NewSource(seed))
+	}
+}
+
+// GenerateFieldFunc is a function that provides custom field values.
+// It is used as an option to the document generator.
+// The function receives the index of the document being generated and a function that
+// generates the next value in the sequence of values for the field.
+type GenerateFieldFunc func(i int, next func() any) any
diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go
new file mode 100644
index 0000000000..a29f5b9f28
--- /dev/null
+++ b/tests/gen/gen_auto_test.go
@@ -0,0 +1,1304 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package gen + +import ( + "math" + "testing" + "time" + + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/client" +) + +func getField(t *testing.T, doc *client.Document, fieldName string) any { + fVal, err := doc.GetValue(fieldName) + if err != nil { + assert.Fail(t, "field %s not found", fieldName) + } + return fVal.Value() +} + +func getStringField(t *testing.T, doc *client.Document, fieldName string) string { + val, ok := getField(t, doc, fieldName).(string) + assert.True(t, ok, "field %s is not of type string", fieldName) + return val +} + +func getIntField(t *testing.T, doc *client.Document, fieldName string) int { + fVal := getField(t, doc, fieldName) + switch val := fVal.(type) { + case int: + return val + case float64: + return int(val) + case int64: + return int(val) + } + assert.Fail(t, "field %s is not of type int or float64", fieldName) + return 0 +} + +func getFloatField(t *testing.T, doc *client.Document, fieldName string) float64 { + val, ok := getField(t, doc, fieldName).(float64) + assert.True(t, ok, "field %s is not of type float64", fieldName) + return val +} + +func getBooleanField(t *testing.T, doc *client.Document, fieldName string) bool { + val, ok := getField(t, doc, fieldName).(bool) + assert.True(t, ok, "field %s is not of type bool", fieldName) + return val +} + +func getDocKeysFromDocs(docs []*client.Document) []string { + var result []string + for _, doc := range docs { + result = append(result, doc.Key().String()) + } + return result +} + +func filterByCollection(docs []GeneratedDoc, name string) []*client.Document { + var result []*client.Document + for _, doc := range docs { + if doc.Col.Description.Name == name { + result = append(result, doc.Doc) + } + } + return result +} + +func removeDuplicateStr(strSlice []string) []string { + allKeys := make(map[string]bool) + list := make([]string, 0, len(strSlice)) + for _, item := range strSlice { + if _, value := allKeys[item]; !value { + allKeys[item] = true + list = append(list, item) + } + } + return list +} + +func assertDocKeysMatch( + t *testing.T, + docs []GeneratedDoc, + primaryCol, secondaryCol string, + foreignField string, + allowDuplicates bool, +) { + primaryDocs := filterByCollection(docs, primaryCol) + secondaryDocs := filterByCollection(docs, secondaryCol) + + docKeys := getDocKeysFromDocs(primaryDocs) + foreignValues := make([]string, 0, len(secondaryDocs)) + for _, secDoc := range secondaryDocs { + foreignValues = append(foreignValues, getStringField(t, secDoc, foreignField)) + } + + if allowDuplicates { + newValues := removeDuplicateStr(foreignValues) + foreignValues = newValues + } + + assert.ElementsMatch(t, docKeys, foreignValues) +} + +func assertUniformlyDistributedIntFieldRange(t *testing.T, docs []GeneratedDoc, fieldName string, minVal, maxVal int) { + vals := make(map[int]bool, len(docs)) + foundMin := math.MaxInt + foundMax := math.MinInt + for _, doc := range docs { + val := getIntField(t, doc.Doc, fieldName) + vals[val] = true + if val < foundMin { + foundMin = val + } + if val > foundMax { + foundMax = val + } + } + intRange := maxVal - minVal + intPrecision := (intRange * 20 / len(docs)) + assert.LessOrEqual(t, foundMin, minVal+intPrecision, "field %s is not distributed across the range", fieldName) + assert.GreaterOrEqual(t, foundMax, maxVal-intPrecision-1, "field %s is not distributed across the range", fieldName) + + expectedLen := len(docs) + if intRange < expectedLen { + expectedLen = intRange + } + expectedLen = int(float64(expectedLen) * 0.7) + 
assert.GreaterOrEqual(t, len(vals), expectedLen, "values of field %s are not uniformly distributed", fieldName) +} + +func assertUniformlyDistributedStringField(t *testing.T, docs []GeneratedDoc, fieldName string, strLen int) { + vals := make(map[string]bool, len(docs)) + var wrongStr string + for _, doc := range docs { + val := getStringField(t, doc.Doc, fieldName) + vals[val] = true + if len(val) != strLen { + wrongStr = val + } + } + if wrongStr != "" { + assert.Fail(t, "unexpected string length", "encountered %s field's value with unexpected len. Example: %s should be of len %d", + fieldName, wrongStr, strLen) + } + assert.GreaterOrEqual(t, len(vals), int(float64(len(docs))*0.99), + "values of field %s are not uniformly distributed", fieldName) +} + +func assertUniformlyDistributedBoolField(t *testing.T, docs []GeneratedDoc, fieldName string, ratio float64) { + trueCounter := 0 + + for _, doc := range docs { + if getBooleanField(t, doc.Doc, fieldName) { + trueCounter++ + } + } + + const precision = 0.05 + const msg = "values of field %s are not uniformly distributed" + + actualRatio := float64(trueCounter) / float64(len(docs)) + assert.GreaterOrEqual(t, actualRatio+precision, ratio, msg, fieldName) + assert.LessOrEqual(t, actualRatio-precision, ratio, msg, fieldName) +} + +func assertUniformlyDistributedFloatFieldRange(t *testing.T, docs []GeneratedDoc, fieldName string, minVal, maxVal float64) { + vals := make(map[float64]bool, len(docs)) + foundMin := math.Inf(1) + foundMax := math.Inf(-1) + for _, doc := range docs { + val := getFloatField(t, doc.Doc, fieldName) + vals[val] = true + if val < foundMin { + foundMin = val + } + if val > foundMax { + foundMax = val + } + } + floatPrecision := ((maxVal - minVal) / float64(len(docs))) * 20 + assert.LessOrEqual(t, foundMin, minVal+floatPrecision, "field %s is not distributed across the range", fieldName) + assert.GreaterOrEqual(t, foundMax, maxVal-floatPrecision, "field %s is not distributed across the range", fieldName) + + assert.GreaterOrEqual(t, len(vals), int(float64(len(docs))*0.7), "values of field %s are not uniformly distributed", fieldName) +} + +func assertUniformRelationDistribution( + t *testing.T, + docs []GeneratedDoc, + primaryColInd, secondaryColInd string, + foreignField string, + min, max int, +) { + primaryCol := filterByCollection(docs, primaryColInd) + secondaryCol := filterByCollection(docs, secondaryColInd) + assert.GreaterOrEqual(t, len(secondaryCol), len(primaryCol)*min) + assert.LessOrEqual(t, len(secondaryCol), len(primaryCol)*max) + + secondaryPerPrimary := make(map[string]int) + for _, d := range secondaryCol { + docKey := getStringField(t, d, foreignField) + secondaryPerPrimary[docKey]++ + } + minDocsPerPrimary := math.MaxInt + maxDocsPerPrimary := math.MinInt + for _, numDevices := range secondaryPerPrimary { + if numDevices < minDocsPerPrimary { + minDocsPerPrimary = numDevices + } + if numDevices > maxDocsPerPrimary { + maxDocsPerPrimary = numDevices + } + } + + assert.LessOrEqual(t, minDocsPerPrimary, min+1) + assert.GreaterOrEqual(t, minDocsPerPrimary, min) + + assert.LessOrEqual(t, maxDocsPerPrimary, max) + assert.GreaterOrEqual(t, maxDocsPerPrimary, max-1) +} + +func TestAutoGenerateFromSchema_Simple(t *testing.T) { + const numUsers = 1000 + schema := ` + type User { + name: String + age: Int + verified: Boolean + rating: Float + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + assert.Len(t, docs, numUsers) + + 
assertUniformlyDistributedStringField(t, docs, "name", 10) + assertUniformlyDistributedIntFieldRange(t, docs, "age", 0, 10000) + assertUniformlyDistributedBoolField(t, docs, "verified", 0.5) + assertUniformlyDistributedFloatFieldRange(t, docs, "rating", 0.0, 1.0) +} + +func TestAutoGenerateFromSchema_ConfigIntRange(t *testing.T) { + const numUsers = 1000 + schema := ` + type User { + age: Int # min: 1, max: 120 + money: Int # min: -1000, max: 10000 + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + assert.Len(t, docs, numUsers) + + assertUniformlyDistributedIntFieldRange(t, docs, "age", 1, 120) + assertUniformlyDistributedIntFieldRange(t, docs, "money", -1000, 10000) +} + +func TestAutoGenerateFromSchema_ConfigFloatRange(t *testing.T) { + const numUsers = 1000 + schema := ` + type User { + rating: Float # min: 1.5, max: 5.0 + product: Float # min: -1.0, max: 1.0 + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + assert.Len(t, docs, numUsers) + + assertUniformlyDistributedFloatFieldRange(t, docs, "rating", 1.5, 5) + assertUniformlyDistributedFloatFieldRange(t, docs, "product", -1, 1) +} + +func TestAutoGenerateFromSchema_ConfigStringLen(t *testing.T) { + const numUsers = 1000 + schema := ` + type User { + name: String # len: 8 + email: String # len: 12 + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + assert.Len(t, docs, numUsers) + + assertUniformlyDistributedStringField(t, docs, "name", 8) + assertUniformlyDistributedStringField(t, docs, "email", 12) +} + +func TestAutoGenerateFromSchema_ConfigBoolRatio(t *testing.T) { + const numUsers = 1000 + schema := ` + type User { + name: String # len: 8 + verified: Boolean # ratio: 0.2 + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + assert.Len(t, docs, numUsers) + + assertUniformlyDistributedBoolField(t, docs, "verified", 0.2) +} + +func TestAutoGenerateFromSchema_IfNoTypeDemandIsGiven_ShouldUseDefault(t *testing.T) { + schema := ` + type User { + name: String + }` + + docs, err := AutoGenerateFromSDL(schema) + assert.NoError(t, err) + + const defaultDemand = 10 + assert.Len(t, filterByCollection(docs, "User"), defaultDemand) +} + +func TestAutoGenerateFromSchema_RelationOneToOne(t *testing.T) { + const numUsers = 10 + schema := ` + type User { + name: String + device: Device + } + + type Device { + owner: User + model: String + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numUsers) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", false) +} + +func TestAutoGenerateFromSchema_RelationOneToMany(t *testing.T) { + const numUsers = 10 + schema := ` + type User { + name: String + devices: [Device] + } + + type Device { + owner: User + model: String + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numUsers*2) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) +} + +func TestAutoGenerateFromSchema_RelationOneToManyWithConfiguredNumberOfElements(t *testing.T) { + const ( + numUsers = 100 + minDevicesPerUser = 1 + maxDevicesPerUser = 5 + ) + 
schema := ` + type User { + name: String + devices: [Device] # min: 1, max: 5 + } + + type Device { + owner: User + model: String + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + + assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", minDevicesPerUser, maxDevicesPerUser) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) +} + +func TestAutoGenerateFromSchema_RelationOneToManyToOneWithConfiguredNumberOfElements(t *testing.T) { + const ( + numUsers = 100 + devicesPerUser = 2 + ) + schema := ` + type User { + name: String + devices: [Device] # min: 2, max: 2 + } + + type Device { + owner: User + model: String + specs: Specs + } + + type Specs { + device: Device @primary + OS: String + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numUsers*devicesPerUser) + assert.Len(t, filterByCollection(docs, "Specs"), numUsers*devicesPerUser) + + assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocKeysMatch(t, docs, "Device", "Specs", "device_id", false) +} + +func TestAutoGenerateFromSchema_RelationOneToManyToOnePrimaryWithConfiguredNumberOfElements(t *testing.T) { + const ( + numUsers = 100 + devicesPerUser = 2 + ) + schema := ` + type User { + name: String + devices: [Device] # min: 2, max: 2 + } + + type Device { + owner: User + model: String + specs: Specs @primary + } + + type Specs { + device: Device + OS: String + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("User", numUsers)) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numUsers*devicesPerUser) + assert.Len(t, filterByCollection(docs, "Specs"), numUsers*devicesPerUser) + + assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocKeysMatch(t, docs, "Specs", "Device", "specs_id", false) +} + +func TestAutoGenerateFromSchema_RelationOneToManyToManyWithNumDocsForSecondaryType(t *testing.T) { + const ( + numDevices = 40 + devicesPerUser = 2 + componentsPerDevice = 5 + ) + schema := ` + type User { + name: String + devices: [Device] # min: 2, max: 2 + } + + type Device { + owner: User + model: String + specs: Specs + components: [Component] # min: 5, max: 5 + } + + type Specs { + device: Device @primary + OS: String + } + + type Component { + device: Device + serialNumber: String + }` + + docs, err := AutoGenerateFromSDL(schema, WithTypeDemand("Device", numDevices)) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numDevices/devicesPerUser) + assert.Len(t, filterByCollection(docs, "Device"), numDevices) + assert.Len(t, filterByCollection(docs, "Specs"), numDevices) + assert.Len(t, filterByCollection(docs, "Component"), numDevices*componentsPerDevice) + + assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) + assertUniformRelationDistribution(t, docs, "Device", "Specs", "device_id", 1, 1) + assertUniformRelationDistribution(t, docs, "Device", "Component", "device_id", 
componentsPerDevice, componentsPerDevice) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocKeysMatch(t, docs, "Device", "Specs", "device_id", false) + assertDocKeysMatch(t, docs, "Device", "Component", "device_id", true) +} + +func TestAutoGenerateFromSchema_DemandsForDifferentRelationTrees(t *testing.T) { + const ( + numUsers = 20 + numDevices = 15 + componentsPerDevice = 2 + ) + schema := ` + type User { + name: String + } + + type Device { + model: String + components: [Component] # min: 2, max: 2 + } + + type Component { + device: Device + serialNumber: String + }` + + docs, err := AutoGenerateFromSDL( + schema, + WithTypeDemand("User", numUsers), + WithTypeDemand("Device", numDevices), + ) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numDevices) + assert.Len(t, filterByCollection(docs, "Component"), numDevices*componentsPerDevice) + + assertUniformRelationDistribution(t, docs, "Device", "Component", "device_id", componentsPerDevice, componentsPerDevice) + + assertDocKeysMatch(t, docs, "Device", "Component", "device_id", true) +} + +func TestAutoGenerateFromSchema_IfTypeDemandedForSameTreeAddsUp_ShouldGenerate(t *testing.T) { + schema := ` + type User { + name: String + devices: [Device] + orders: [Order] # min: 10, max: 10 + } + + type Device { + model: String + owner: User + } + + type Order { + id: String + user: User + }` + + docs, err := AutoGenerateFromSDL( + schema, + WithTypeDemand("Order", 10), + WithTypeDemand("Device", 30), + ) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), 1) + assert.Len(t, filterByCollection(docs, "Device"), 30) + assert.Len(t, filterByCollection(docs, "Order"), 10) +} + +func TestAutoGenerateFromSchema_IfNoDemandForPrimaryType_ShouldDeduceFromMaxSecondaryDemand(t *testing.T) { + schema := ` + type User { + name: String + devices: [Device] + Orders: [Order] + } + + type Device { + model: String + owner: User + } + + type Order { + id: String + user: User + }` + + docs, err := AutoGenerateFromSDL( + schema, + WithTypeDemand("Order", 10), + WithTypeDemand("Device", 30), + ) + assert.NoError(t, err) + + // users len should be equal to max of secondary type demand + assert.Len(t, filterByCollection(docs, "User"), 30) + assert.Len(t, filterByCollection(docs, "Device"), 30) + assert.Len(t, filterByCollection(docs, "Order"), 10) +} + +func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { + testCases := []struct { + name string + schema string + options []Option + }{ + { + name: "demand for direct parent can not be satisfied", + schema: ` + type User { + name: String + devices: [Device] # min: 2, max: 2 + } + + type Device { + model: String + owner: User + }`, + options: []Option{WithTypeDemand("Device", 1)}, + }, + { + name: "demand for grand parent can not be satisfied", + schema: ` + type User { + name: String + devices: [Device] # min: 2, max: 2 + } + + type Device { + model: String + owner: User + components: [Component] # min: 2, max: 2 + } + + type Component { + device: Device + OS: String + }`, + options: []Option{WithTypeDemand("Component", 2)}, + }, + { + name: "demand for sibling primary can not be satisfied", + schema: ` + type User { + name: String + devices: [Device] # min: 2, max: 2 + } + + type Device { + model: String + owner: User + manufacturer: Manufacturer + } + + type Manufacturer { + name: String + devices: [Device] # min: 10, max: 10 + }`, + options: 
[]Option{WithTypeDemand("User", 4)},
+		},
+		{
+			name: "demand for secondary types from the same tree can not be satisfied",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # min: 0, max: 29
+					orders: [Order] # min: 10, max: 10
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}
+
+				type Order {
+					id: String
+					user: User
+				}`,
+			options: []Option{WithTypeDemand("Order", 10), WithTypeDemand("Device", 30)},
+		},
+		{
+			schema: `
+				type User {
+					name: String
+					device: Device
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+			options: []Option{WithTypeDemand("User", 10), WithTypeDemand("Device", 30)},
+		},
+		{
+			schema: `
+				type User {
+					name: String
+					device: Device
+					orders: Order
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}
+
+				type Order {
+					id: String
+					user: User
+				}`,
+			options: []Option{WithTypeDemand("Order", 10), WithTypeDemand("Device", 30)},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			_, err := AutoGenerateFromSDL(tc.schema, tc.options...)
+
+			assert.ErrorContains(t, err, errCanNotSupplyTypeDemand)
+		})
+	}
+}
+
+func TestAutoGenerateFromSchema_InvalidConfig(t *testing.T) {
+	testCases := []struct {
+		name   string
+		schema string
+	}{
+		{
+			name: "array: max is less than min",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # min: 2, max: 1
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+		},
+		{
+			name: "array: min is negative",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # min: -1, max: 10
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+		},
+		{
+			name: "array: missing max",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # min: 2
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+		},
+		{
+			name: "array: missing min",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # max: 10
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+		},
+		{
+			name: "array: min as float",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # min: 2.5, max: 10
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+		},
+		{
+			name: "array: max as float",
+			schema: `
+				type User {
+					name: String
+					devices: [Device] # min: 2, max: 10.0
+				}
+
+				type Device {
+					model: String
+					owner: User
+				}`,
+		},
+		{
+			name: "int value: max is less than min",
+			schema: `
+				type User {
+					age: Int # min: 10, max: 2
+				}`,
+		},
+		{
+			name: "int value: missing min",
+			schema: `
+				type User {
+					age: Int # max: 2
+				}`,
+		},
+		{
+			name: "int value: missing max",
+			schema: `
+				type User {
+					age: Int # min: 2
+				}`,
+		},
+		{
+			name: "float value: max is less than min",
+			schema: `
+				type User {
+					rating: Float # min: 10.5, max: 2.5
+				}`,
+		},
+		{
+			name: "float value: missing min",
+			schema: `
+				type User {
+					rating: Float # max: 2.5
+				}`,
+		},
+		{
+			name: "float value: missing max",
+			schema: `
+				type User {
+					rating: Float # min: 2.5
+				}`,
+		},
+		{
+			name: "min/max on non-number",
+			schema: `
+				type User {
+					verified: Boolean # min: 2, max: 8
+				}`,
+		},
+		{
+			name: "string: zero len",
+			schema: `
+				type User {
+					name: String # len: 0
+				}`,
+		},
+		{
+			name: "string: negative len",
+			schema: `
+				type User {
+					name: String # len: -2
+				}`,
+		},
+		{
+			name: "string: non-int len",
+			schema: `
+				type User {
+					name: String # len: 8.5
+				}`,
+		},
+		{
+			name: "len on non-string",
+			schema: `
+				type User {
+					age: Int # len: 8
+				}`,
+		},
+		{
+			name: "bool: negative ratio",
schema: ` + type User { + verified: Boolean # ratio: -0.1 + }`, + }, + { + name: "bool: ratio greater than 1", + schema: ` + type User { + verified: Boolean # ratio: 1.1 + }`, + }, + { + name: "bool: ratio with non-float type", + schema: ` + type User { + verified: Boolean # ratio: "0.5" + }`, + }, + { + name: "ratio on non-bool field", + schema: ` + type User { + age: Int # ratio: 0.5 + }`, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, err := AutoGenerateFromSDL(tc.schema, WithTypeDemand("User", 4)) + + assert.ErrorContains(t, err, errInvalidConfiguration) + }) + } +} + +func TestAutoGenerateFromSchema_CustomFieldValueGenerator(t *testing.T) { + const ( + numUsers = 10 + ageVal = 7 + ) + schema := ` + type User { + name: String + age: Int + }` + + indexes := make([]int, 0, numUsers) + intVals := make(map[int]bool) + + docs, err := AutoGenerateFromSDL(schema, + WithTypeDemand("User", numUsers), + WithFieldGenerator("User", "age", func(i int, next func() any) any { + indexes = append(indexes, i) + intVals[next().(int)] = true + return ageVal + }), + ) + assert.NoError(t, err) + + expectedIndexes := make([]int, 0, numUsers) + for i := 0; i < numUsers; i++ { + expectedIndexes = append(expectedIndexes, i) + } + + assert.Equal(t, expectedIndexes, indexes) + assert.GreaterOrEqual(t, len(intVals), numUsers-1) + + for _, doc := range docs { + actualAgeVal := getIntField(t, doc.Doc, "age") + //actualAgeVal := getIntField(t, jsonToMap(doc.JSON), "age") + assert.Equal(t, ageVal, actualAgeVal) + } +} + +func TestAutoGenerateFromSchema_IfOptionOverlapsSchemaConfig_ItShouldOverwrite(t *testing.T) { + const ( + numUsers = 20 + ) + schema := ` + type User { + name: String # len: 8 + devices: [Device] # min: 2, max: 2 + age: Int # min: 10, max: 20 + rating: Float # min: 0.0, max: 1.0 + } + + type Device { + model: String + owner: User + }` + + docs, err := AutoGenerateFromSDL( + schema, + WithTypeDemand("User", numUsers), + WithFieldRange("User", "devices", 3, 3), + WithFieldRange("User", "age", 30, 40), + WithFieldRange("User", "rating", 1.0, 2.0), + WithFieldLen("User", "name", 6), + ) + assert.NoError(t, err) + + userDocs := filterByCollection(docs, "User") + assert.Len(t, userDocs, numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numUsers*3) + + for _, userDoc := range userDocs { + actualAgeVal := getIntField(t, userDoc, "age") + assert.GreaterOrEqual(t, actualAgeVal, 30) + assert.LessOrEqual(t, actualAgeVal, 40) + + actualRatingVal := getFloatField(t, userDoc, "rating") + assert.GreaterOrEqual(t, actualRatingVal, 1.0) + assert.LessOrEqual(t, actualRatingVal, 2.0) + + actualNameVal := getStringField(t, userDoc, "name") + assert.Len(t, actualNameVal, 6) + } +} + +func TestAutoGenerateFromSchema_WithRandomSeed_ShouldBeDeterministic(t *testing.T) { + schema := ` + type User { + devices: [Device] # min: 0, max: 5 + age: Int + rating: Float + } + + type Device { + model: String + owner: User + }` + + demandOpt := WithTypeDemand("User", 5) + seed := time.Now().UnixNano() + + docs1, err := AutoGenerateFromSDL(schema, demandOpt, WithRandomSeed(seed)) + assert.NoError(t, err) + + docs2, err := AutoGenerateFromSDL(schema, demandOpt, WithRandomSeed(seed)) + assert.NoError(t, err) + + docs3, err := AutoGenerateFromSDL(schema, demandOpt, WithRandomSeed(time.Now().UnixNano())) + assert.NoError(t, err) + + assert.Equal(t, docs1, docs2) + assert.NotEqual(t, docs1, docs3) +} + +func TestAutoGenerateFromSchema_InvalidOption(t *testing.T) { + const schema = ` + type 
User { + name: String + }` + testCases := []struct { + name string + options []Option + }{ + { + name: "type demand for non-existing type", + options: []Option{WithTypeDemand("Invalid", 10)}, + }, + { + name: "type demand range for non-existing type", + options: []Option{WithTypeDemandRange("Invalid", 0, 10)}, + }, + { + name: "field len for non-existing type", + options: []Option{WithFieldLen("Invalid", "name", 10)}, + }, + { + name: "field len for non-existing field", + options: []Option{WithFieldLen("User", "invalid", 10)}, + }, + { + name: "field range for non-existing type", + options: []Option{WithFieldRange("Invalid", "name", 0, 10)}, + }, + { + name: "field range for non-existing field", + options: []Option{WithFieldRange("User", "invalid", 0, 10)}, + }, + { + name: "field generator for non-existing type", + options: []Option{WithFieldGenerator("Invalid", "name", func(i int, next func() any) any { return "" })}, + }, + { + name: "field generator for non-existing field", + options: []Option{WithFieldGenerator("User", "invalid", func(i int, next func() any) any { return "" })}, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + _, err := AutoGenerateFromSDL(schema, tc.options...) + + assert.ErrorContains(t, err, errInvalidConfiguration) + }) + } +} + +func TestAutoGenerate_IfCollectionDefinitionIsIncomplete_ReturnError(t *testing.T) { + getValidDefs := func() []client.CollectionDefinition { + return []client.CollectionDefinition{ + { + Description: client.CollectionDescription{ + Name: "User", + ID: 0, + }, + Schema: client.SchemaDescription{ + Name: "User", + Fields: []client.FieldDescription{ + { + Name: "name", + Kind: client.FieldKind_INT, + }, + { + Name: "device", + Kind: client.FieldKind_FOREIGN_OBJECT, + Schema: "Device", + RelationType: client.Relation_Type_ONE | client.Relation_Type_ONEONE, + }, + }, + }, + }, + { + Description: client.CollectionDescription{ + Name: "Device", + ID: 1, + }, + Schema: client.SchemaDescription{ + Name: "Device", + Fields: []client.FieldDescription{ + { + Name: "model", + Kind: client.FieldKind_STRING, + }, + { + Name: "owner", + Kind: client.FieldKind_FOREIGN_OBJECT, + Schema: "User", + RelationType: client.Relation_Type_ONE | + client.Relation_Type_ONEONE | + client.Relation_Type_Primary, + }, + }, + }, + }, + } + } + + testCases := []struct { + name string + changeDefs func(defs []client.CollectionDefinition) + }{ + { + name: "description name is empty", + changeDefs: func(defs []client.CollectionDefinition) { + defs[0].Description.Name = "" + }, + }, + { + name: "schema name is empty", + changeDefs: func(defs []client.CollectionDefinition) { + defs[0].Schema.Name = "" + }, + }, + { + name: "field name is empty", + changeDefs: func(defs []client.CollectionDefinition) { + defs[0].Schema.Fields[0].Name = "" + }, + }, + { + name: "not matching names", + changeDefs: func(defs []client.CollectionDefinition) { + defs[0].Schema.Name = "Device" + }, + }, + { + name: "ids are not enumerated", + changeDefs: func(defs []client.CollectionDefinition) { + defs[1].Description.ID = 0 + }, + }, + { + name: "relation field is missing schema name", + changeDefs: func(defs []client.CollectionDefinition) { + defs[1].Schema.Fields[1].Schema = "" + }, + }, + { + name: "relation field references unknown schema", + changeDefs: func(defs []client.CollectionDefinition) { + defs[1].Schema.Fields[1].Schema = "Unknown" + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + defs := getValidDefs() + 
tc.changeDefs(defs) + _, err := AutoGenerate(defs) + + assert.ErrorContains(t, err, errIncompleteColDefinition) + }) + } +} + +func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { + const ( + numUsers = 20 + ) + + defs := []client.CollectionDefinition{ + { + Description: client.CollectionDescription{ + Name: "User", + ID: 0, + }, + Schema: client.SchemaDescription{ + Name: "User", + Fields: []client.FieldDescription{ + { + Name: "name", + Kind: client.FieldKind_STRING, + }, + { + Name: "age", + Kind: client.FieldKind_INT, + }, + { + Name: "rating", + Kind: client.FieldKind_FLOAT, + }, + { + Name: "devices", + Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, + Schema: "Device", + RelationType: client.Relation_Type_MANY | client.Relation_Type_ONEMANY, + }, + }, + }, + }, + { + Description: client.CollectionDescription{ + Name: "Device", + ID: 1, + }, + Schema: client.SchemaDescription{ + Name: "Device", + Fields: []client.FieldDescription{ + { + Name: "model", + Kind: client.FieldKind_STRING, + }, + { + Name: "owner", + Kind: client.FieldKind_FOREIGN_OBJECT, + Schema: "User", + RelationType: client.Relation_Type_ONE | + client.Relation_Type_ONEMANY | + client.Relation_Type_Primary, + }, + }, + }, + }, + } + docs, err := AutoGenerate( + defs, + WithTypeDemand("User", numUsers), + WithFieldRange("User", "devices", 3, 3), + WithFieldRange("User", "age", 30, 40), + WithFieldRange("User", "rating", 1.0, 2.0), + WithFieldLen("User", "name", 6), + ) + assert.NoError(t, err) + + userDocs := filterByCollection(docs, "User") + assert.Len(t, userDocs, numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numUsers*3) + + for _, userDoc := range userDocs { + actualAgeVal := getIntField(t, userDoc, "age") + assert.GreaterOrEqual(t, actualAgeVal, 30) + assert.LessOrEqual(t, actualAgeVal, 40) + + actualRatingVal := getFloatField(t, userDoc, "rating") + assert.GreaterOrEqual(t, actualRatingVal, 1.0) + assert.LessOrEqual(t, actualRatingVal, 2.0) + + actualNameVal := getStringField(t, userDoc, "name") + assert.Len(t, actualNameVal, 6) + } +} diff --git a/tests/gen/schema_parser.go b/tests/gen/schema_parser.go new file mode 100644 index 0000000000..216376c26d --- /dev/null +++ b/tests/gen/schema_parser.go @@ -0,0 +1,195 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package gen + +import ( + "context" + "strconv" + "strings" + "unicode" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/request/graphql" +) + +func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { + parser, err := graphql.NewParser() + if err != nil { + return nil, err + } + cols, err := parser.ParseSDL(context.Background(), gqlSDL) + if err != nil { + return nil, err + } + result := make(map[string]client.CollectionDefinition) + for _, col := range cols { + result[col.Description.Name] = col + } + return result, nil +} + +func parseConfig(gqlSDL string) (configsMap, error) { + parser := configParser{} + err := parser.parse(gqlSDL) + if err != nil { + return nil, err + } + return parser.genConfigs, nil +} + +const typePrefix = "type" + +type configParser struct { + genConfigs map[string]map[string]genConfig + currentConfig map[string]genConfig + currentType string + expectTypeName bool +} + +func (p *configParser) tryParseTypeName(line string) { + const typePrefixLen = len(typePrefix) + if strings.HasPrefix(line, typePrefix) && (len(line) == typePrefixLen || + unicode.IsSpace(rune(line[typePrefixLen]))) { + p.expectTypeName = true + line = strings.TrimSpace(line[typePrefixLen:]) + } + + if !p.expectTypeName || line == "" { + return + } + + typeNameEndPos := strings.Index(line, " ") + if typeNameEndPos == -1 { + typeNameEndPos = len(line) + } + p.currentType = strings.TrimSpace(line[:typeNameEndPos]) + p.currentConfig = make(map[string]genConfig) + p.expectTypeName = false +} + +func (p *configParser) tryParseConfig(line string) (bool, error) { + configPos := strings.Index(line, "#") + if configPos != -1 { + var err error + pos := strings.LastIndex(line[:configPos], ":") + if pos == -1 { + return true, nil + } + fields := strings.Fields(line[:pos]) + propName := fields[len(fields)-1] + p.currentConfig[propName], err = parseGenConfig(line[configPos+1:]) + if err != nil { + return false, err + } + return true, nil + } + return false, nil +} + +func (p *configParser) parseLine(line string) error { + line = strings.TrimSpace(line) + if p.currentType == "" { + p.tryParseTypeName(line) + } + skipLine, err := p.tryParseConfig(line) + if err != nil { + return err + } + if skipLine { + return nil + } + closeTypePos := strings.Index(line, "}") + if closeTypePos != -1 { + if len(p.currentConfig) > 0 { + p.genConfigs[p.currentType] = p.currentConfig + } + p.currentType = "" + return p.parseLine(line[closeTypePos+1:]) + } + return nil +} + +func (p *configParser) parse(gqlSDL string) error { + p.genConfigs = make(map[string]map[string]genConfig) + + schemaLines := strings.Split(gqlSDL, "\n") + for _, line := range schemaLines { + err := p.parseLine(line) + if err != nil { + return err + } + } + return nil +} + +func parseGenConfig(configStr string) (genConfig, error) { + configStr = strings.TrimSpace(configStr) + if configStr == "" { + return genConfig{}, nil + } + + config := genConfig{props: make(map[string]any)} + configParts := strings.Split(configStr, ",") + for _, part := range configParts { + part = strings.TrimSpace(part) + if part == "" { + continue + } + propParts := strings.Split(part, ":") + if len(propParts) == 1 { + if strings.Contains(part, " ") { + return genConfig{}, NewErrFailedToParse("Config label should not contain spaces: " + configStr) + } + config.labels = append(config.labels, strings.TrimSpace(propParts[0])) + } else { + propName := strings.TrimSpace(propParts[0]) + if propName == "" { + return genConfig{}, 
NewErrFailedToParse("Config property is missing a name: " + configStr) + } + propVal := strings.TrimSpace(propParts[1]) + if propVal == "" { + return genConfig{}, NewErrFailedToParse("Config property is missing a value: " + configStr) + } + val, err := parseGenConfigValue(propVal) + if err != nil { + return genConfig{}, err + } + config.props[propName] = val + } + } + if len(config.props) == 0 { + config.props = nil + } + + return config, nil +} + +func parseGenConfigValue(valueStr string) (any, error) { + valueStr = strings.TrimSpace(valueStr) + if valueStr == "true" { + return true, nil + } + if valueStr == "false" { + return false, nil + } + if valueStr[0] == '"' { + return valueStr[1 : len(valueStr)-1], nil + } + if strings.Contains(valueStr, ".") { + if val, err := strconv.ParseFloat(valueStr, 64); err == nil { + return val, nil + } + } + if val, err := strconv.ParseInt(valueStr, 10, 32); err == nil { + return int(val), nil + } + return nil, NewErrFailedToParse("Failed to parse config value " + valueStr) +} diff --git a/tests/gen/schema_parser_test.go b/tests/gen/schema_parser_test.go new file mode 100644 index 0000000000..5151a23dda --- /dev/null +++ b/tests/gen/schema_parser_test.go @@ -0,0 +1,297 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package gen + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSchemaParser_ParseGenConfig(t *testing.T) { + tests := []struct { + name string + schema string + want configsMap + }{ + { + name: "string values", + schema: ` + type User { + name: String # pattern: "some pattern" + }`, + want: configsMap{ + "User": { + "name": { + props: map[string]any{ + "pattern": "some pattern", + }, + }, + }, + }, + }, + { + name: "bool values", + schema: ` + type User { + verified: Boolean # default: true + }`, + want: configsMap{ + "User": { + "verified": { + props: map[string]any{ + "default": true, + }, + }, + }, + }, + }, + { + name: "int values", + schema: ` + type User { + age: Int # min: 4, max: 10 + }`, + want: configsMap{ + "User": { + "age": { + props: map[string]any{ + "min": 4, + "max": 10, + }, + }, + }, + }, + }, + { + name: "float values", + schema: ` + type User { + rating: Float # min: 1.1, max: 5.5 + }`, + want: configsMap{ + "User": { + "rating": { + props: map[string]any{ + "min": 1.1, + "max": 5.5, + }, + }, + }, + }, + }, + { + name: "labels", + schema: ` + type User { + name: String # unique, indexed + }`, + want: configsMap{ + "User": { + "name": { + labels: []string{"unique", "indexed"}, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseConfig(tt.schema) + assert.NoError(t, err) + assert.Equal(t, tt.want, got) + }) + } +} + +func TestSchemaParser_IfCanNotParse_ReturnError(t *testing.T) { + tests := []struct { + name string + schema string + }{ + { + name: "missing value", + schema: ` + type User { + name: String # pattern: + }`, + }, + { + name: "missing prop name", + schema: ` + type User { + name: String # : 3 + }`, + }, + { + name: "no coma between props", + schema: ` + type User { + verified: Boolean # label1 label2 + }`, + }, + { + name: "invalid value", + schema: ` + type User { + age: Int # min: 4 
5 + }`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := parseConfig(tt.schema) + assert.ErrorIs(t, err, NewErrFailedToParse("")) + }) + } +} + +func TestSchemaParser_ParseUnformattedSchema(t *testing.T) { + tests := []struct { + name string + schema string + expectEmpty bool + }{ + { + name: "flat schema", + schema: ` + type User { name: String }`, + expectEmpty: true, + }, + { + name: "closing bracket on a line with property", + schema: ` + type User { + name: String # len: 4 + rating: Float }`, + }, + { + name: "space after property name", + schema: ` + type User { + name : String # len: 4 + rating : Float + }`, + }, + { + name: "prop config on the same line with type", + schema: ` + type User { name: String # len: 4 + }`, + }, + { + name: "opening bracket on a new line", + schema: ` + type User + { name: String # len: 4 + }`, + }, + { + name: "2 props on the same line", + schema: ` + type User { + age: Int name: String # len: 4 + }`, + }, + { + name: "new type after closing bracket", + schema: ` + type Device { + model: String + } type User { + age: Int name: String # len: 4 + }`, + }, + { + name: "new type after closing bracket", + schema: ` + type Device { + model: String + } type User { + age: Int name: String # len: 4 + }`, + }, + { + name: "type name on a new line", + schema: ` + type + User { + age: Int name: String # len: 4 + }`, + }, + } + lenConf := configsMap{ + "User": { + "name": { + props: map[string]any{ + "len": 4, + }, + }, + }, + } + emptyConf := configsMap{} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseConfig(tt.schema) + assert.NoError(t, err) + expected := emptyConf + if !tt.expectEmpty { + expected = lenConf + } + assert.Equal(t, expected, got) + }) + } +} + +func TestSchemaParser_IgnoreNonPropertyComments(t *testing.T) { + tests := []struct { + name string + schema string + want configsMap + }{ + { + name: "closing bracket on a line with property", + schema: ` + ################ + # some comment + """ + another comment + """ + type User { + "prop comment" + name: String # len: 4 + # : # another comment : # + email: String # len: 10 + }`, + want: configsMap{ + "User": { + "name": { + props: map[string]any{ + "len": 4, + }, + }, + "email": { + props: map[string]any{ + "len": 10, + }, + }, + }, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := parseConfig(tt.schema) + assert.NoError(t, err) + assert.Equal(t, tt.want, got) + }) + } +} diff --git a/tests/integration/index/docs.go b/tests/integration/index/docs.go index 505eadf98d..379ad5a8a1 100644 --- a/tests/integration/index/docs.go +++ b/tests/integration/index/docs.go @@ -10,46 +10,44 @@ package index -type docsCollection struct { - colName string - docs []map[string]any +import "github.com/sourcenetwork/defradb/tests/predefined" + +func makeExplainQuery(req string) string { + return "query @explain(type: execute) " + req[6:] } -func getUserDocs() docsCollection { - return docsCollection{ - colName: "User", - docs: []map[string]any{ +func getUserDocs() predefined.DocsList { + return predefined.DocsList{ + ColName: "User", + Docs: []map[string]any{ { "name": "Shahzad", "age": 20, "verified": false, "email": "shahzad@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "iPhone Xs", - "year": 2022, - "type": "phone", - "specs": map[string]any{ - "CPU": 2.2, - "Chip": "Intel i3", - "RAM": 8, - "Storage": 512, - "OS": "iOS 12", - }, + "devices": 
[]map[string]any{ + { + "model": "iPhone Xs", + "year": 2022, + "type": "phone", + "specs": map[string]any{ + "CPU": 2.2, + "Chip": "Intel i3", + "RAM": 8, + "Storage": 512, + "OS": "iOS 12", }, - { - "model": "MacBook Pro", - "year": 2020, - "type": "laptop", - "specs": map[string]any{ - "CPU": 2.4, - "Chip": "Intel i5", - "RAM": 16, - "Storage": 2048, - "OS": "Yosemite", - }, + }, + { + "model": "MacBook Pro", + "year": 2020, + "type": "laptop", + "specs": map[string]any{ + "CPU": 2.4, + "Chip": "Intel i5", + "RAM": 16, + "Storage": 2048, + "OS": "Yosemite", }, }, }, @@ -65,10 +63,7 @@ func getUserDocs() docsCollection { "age": 23, "verified": true, "email": "bruno@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{}, - }, + "devices": []map[string]any{}, "address": map[string]any{ "postalCode": 10001, "city": "New York", @@ -81,10 +76,7 @@ func getUserDocs() docsCollection { "age": 44, "verified": true, "email": "roy@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{}, - }, + "devices": []map[string]any{}, "address": map[string]any{ "postalCode": 90028, "city": "Los Angeles", @@ -97,32 +89,29 @@ func getUserDocs() docsCollection { "age": 28, "verified": false, "email": "fred@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "Samsung Galaxy S20", - "year": 2022, - "type": "phone", - "specs": map[string]any{ - "CPU": 2.0, - "Chip": "AMD Athlon", - "RAM": 8, - "Storage": 256, - "OS": "Android 11", - }, + "devices": []map[string]any{ + { + "model": "Samsung Galaxy S20", + "year": 2022, + "type": "phone", + "specs": map[string]any{ + "CPU": 2.0, + "Chip": "AMD Athlon", + "RAM": 8, + "Storage": 256, + "OS": "Android 11", }, - { - "model": "Lenovo ThinkPad", - "year": 2020, - "type": "laptop", - "specs": map[string]any{ - "CPU": 1.9, - "Chip": "AMD Ryzen", - "RAM": 8, - "Storage": 1024, - "OS": "Windows 10", - }, + }, + { + "model": "Lenovo ThinkPad", + "year": 2020, + "type": "laptop", + "specs": map[string]any{ + "CPU": 1.9, + "Chip": "AMD Ryzen", + "RAM": 8, + "Storage": 1024, + "OS": "Windows 10", }, }, }, @@ -138,44 +127,41 @@ func getUserDocs() docsCollection { "age": 30, "verified": false, "email": "john@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "Google Pixel 5", - "year": 2022, - "type": "phone", - "specs": map[string]any{ - "CPU": 2.4, - "Chip": "Octa-core", - "RAM": 16, - "Storage": 512, - "OS": "Android 11", - }, + "devices": []map[string]any{ + { + "model": "Google Pixel 5", + "year": 2022, + "type": "phone", + "specs": map[string]any{ + "CPU": 2.4, + "Chip": "Octa-core", + "RAM": 16, + "Storage": 512, + "OS": "Android 11", }, - { - "model": "Asus Vivobook", - "year": 2022, - "type": "laptop", - "specs": map[string]any{ - "CPU": 2.9, - "Chip": "Intel i7", - "RAM": 64, - "Storage": 2048, - "OS": "Windows 10", - }, + }, + { + "model": "Asus Vivobook", + "year": 2022, + "type": "laptop", + "specs": map[string]any{ + "CPU": 2.9, + "Chip": "Intel i7", + "RAM": 64, + "Storage": 2048, + "OS": "Windows 10", }, - { - "model": "Commodore 64", - "year": 1982, - "type": "computer", - "specs": map[string]any{ - "CPU": 0.1, - "Chip": "MOS 6510", - "RAM": 1, - "Storage": 1, - "OS": "Commodore BASIC 2.0", - }, + }, + { + "model": "Commodore 64", + "year": 1982, + "type": "computer", + "specs": map[string]any{ + "CPU": 0.1, + "Chip": "MOS 6510", + "RAM": 1, + "Storage": 1, + "OS": "Commodore BASIC 2.0", }, }, }, @@ 
-191,68 +177,65 @@ func getUserDocs() docsCollection { "age": 32, "verified": false, "email": "islam@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "iPhone 12s", - "year": 2018, - "type": "phone", - "specs": map[string]any{ - "CPU": 2.1, - "Chip": "A11 Bionic", - "RAM": 8, - "Storage": 1024, - "OS": "iOS 14", - }, + "devices": []map[string]any{ + { + "model": "iPhone 12s", + "year": 2018, + "type": "phone", + "specs": map[string]any{ + "CPU": 2.1, + "Chip": "A11 Bionic", + "RAM": 8, + "Storage": 1024, + "OS": "iOS 14", }, - { - "model": "MacBook Pro", - "year": 2023, - "type": "laptop", - "specs": map[string]any{ - "CPU": 2.6, - "Chip": "Apple M2 Max", - "RAM": 32, - "Storage": 1024, - "OS": "Sonoma 14", - }, + }, + { + "model": "MacBook Pro", + "year": 2023, + "type": "laptop", + "specs": map[string]any{ + "CPU": 2.6, + "Chip": "Apple M2 Max", + "RAM": 32, + "Storage": 1024, + "OS": "Sonoma 14", }, - { - "model": "iPad Pro", - "year": 2020, - "type": "tablet", - "specs": map[string]any{ - "CPU": 2.1, - "Chip": "Intel i5", - "RAM": 8, - "Storage": 512, - "OS": "iOS 14", - }, + }, + { + "model": "iPad Pro", + "year": 2020, + "type": "tablet", + "specs": map[string]any{ + "CPU": 2.1, + "Chip": "Intel i5", + "RAM": 8, + "Storage": 512, + "OS": "iOS 14", }, - { - "model": "Playstation 5", - "year": 2022, - "type": "game_console", - "specs": map[string]any{ - "CPU": 3.5, - "Chip": "AMD Zen 2", - "RAM": 16, - "Storage": 825, - "OS": "FreeBSD", - }, + }, + { + "model": "Playstation 5", + "year": 2022, + "type": "game_console", + "specs": map[string]any{ + "CPU": 3.5, + "Chip": "AMD Zen 2", + "RAM": 16, + "Storage": 825, + "OS": "FreeBSD", }, - { - "model": "Nokia 7610", - "year": 2003, - "type": "phone", - "specs": map[string]any{ - "CPU": 1.8, - "Chip": "Cortex A710", - "RAM": 12, - "Storage": 2, - "OS": "Symbian 7.0", - }, + }, + { + "model": "Nokia 7610", + "year": 2003, + "type": "phone", + "specs": map[string]any{ + "CPU": 1.8, + "Chip": "Cortex A710", + "RAM": 12, + "Storage": 2, + "OS": "Symbian 7.0", }, }, }, @@ -268,32 +251,29 @@ func getUserDocs() docsCollection { "age": 33, "verified": true, "email": "andy@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "Xiaomi Phone", - "year": 2022, - "type": "phone", - "specs": map[string]any{ - "CPU": 1.6, - "Chip": "AMD Octen", - "RAM": 8, - "Storage": 512, - "OS": "Android 11", - }, + "devices": []map[string]any{ + { + "model": "Xiaomi Phone", + "year": 2022, + "type": "phone", + "specs": map[string]any{ + "CPU": 1.6, + "Chip": "AMD Octen", + "RAM": 8, + "Storage": 512, + "OS": "Android 11", }, - { - "model": "Alienware x16", - "year": 2018, - "type": "laptop", - "specs": map[string]any{ - "CPU": 3.2, - "Chip": "Intel i7", - "RAM": 64, - "Storage": 2048, - "OS": "Windows 9", - }, + }, + { + "model": "Alienware x16", + "year": 2018, + "type": "laptop", + "specs": map[string]any{ + "CPU": 3.2, + "Chip": "Intel i7", + "RAM": 64, + "Storage": 2048, + "OS": "Windows 9", }, }, }, @@ -309,56 +289,53 @@ func getUserDocs() docsCollection { "age": 42, "verified": true, "email": "addo@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "iPhone 10", - "year": 2021, - "type": "phone", - "specs": map[string]any{ - "CPU": 1.8, - "Chip": "Intel i3", - "RAM": 8, - "Storage": 256, - "OS": "iOS 12", - }, + "devices": []map[string]any{ + { + "model": "iPhone 10", + "year": 2021, + "type": "phone", + 
"specs": map[string]any{ + "CPU": 1.8, + "Chip": "Intel i3", + "RAM": 8, + "Storage": 256, + "OS": "iOS 12", }, - { - "model": "Acer Aspire 5", - "year": 2020, - "type": "laptop", - "specs": map[string]any{ - "CPU": 2.0, - "Chip": "Intel i5", - "RAM": 16, - "Storage": 512, - "OS": "Windows 10", - }, + }, + { + "model": "Acer Aspire 5", + "year": 2020, + "type": "laptop", + "specs": map[string]any{ + "CPU": 2.0, + "Chip": "Intel i5", + "RAM": 16, + "Storage": 512, + "OS": "Windows 10", }, - { - "model": "HyperX Headset", - "year": 2014, - "type": "headset", - "specs": map[string]any{ - "CPU": nil, - "Chip": nil, - "RAM": nil, - "Storage": nil, - "OS": nil, - }, + }, + { + "model": "HyperX Headset", + "year": 2014, + "type": "headset", + "specs": map[string]any{ + "CPU": "N/A", + "Chip": "N/A", + "RAM": "N/A", + "Storage": "N/A", + "OS": "N/A", }, - { - "model": "Playstation 5", - "year": 2021, - "type": "game_console", - "specs": map[string]any{ - "CPU": 3.5, - "Chip": "AMD Zen 2", - "RAM": 16, - "Storage": 825, - "OS": "FreeBSD", - }, + }, + { + "model": "Playstation 5", + "year": 2021, + "type": "game_console", + "specs": map[string]any{ + "CPU": 3.5, + "Chip": "AMD Zen 2", + "RAM": 16, + "Storage": 825, + "OS": "FreeBSD", }, }, }, @@ -374,44 +351,41 @@ func getUserDocs() docsCollection { "age": 48, "verified": true, "email": "keenan@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "iPhone 13", - "year": 2022, - "type": "phone", - "specs": map[string]any{ - "CPU": 2.3, - "Chip": "M1", - "RAM": 8, - "Storage": 1024, - "OS": "iOS 14", - }, + "devices": []map[string]any{ + { + "model": "iPhone 13", + "year": 2022, + "type": "phone", + "specs": map[string]any{ + "CPU": 2.3, + "Chip": "M1", + "RAM": 8, + "Storage": 1024, + "OS": "iOS 14", }, - { - "model": "MacBook Pro", - "year": 2017, - "type": "laptop", - "specs": map[string]any{ - "CPU": 2.0, - "Chip": "A11 Bionic", - "RAM": 16, - "Storage": 512, - "OS": "Ventura", - }, + }, + { + "model": "MacBook Pro", + "year": 2017, + "type": "laptop", + "specs": map[string]any{ + "CPU": 2.0, + "Chip": "A11 Bionic", + "RAM": 16, + "Storage": 512, + "OS": "Ventura", }, - { - "model": "iPad Mini", - "year": 2015, - "type": "tablet", - "specs": map[string]any{ - "CPU": 1.9, - "Chip": "Intel i3", - "RAM": 8, - "Storage": 1024, - "OS": "iOS 12", - }, + }, + { + "model": "iPad Mini", + "year": 2015, + "type": "tablet", + "specs": map[string]any{ + "CPU": 1.9, + "Chip": "Intel i3", + "RAM": 8, + "Storage": 1024, + "OS": "iOS 12", }, }, }, @@ -427,20 +401,17 @@ func getUserDocs() docsCollection { "age": 55, "verified": true, "email": "chris@gmail.com", - "devices": docsCollection{ - colName: "Device", - docs: []map[string]any{ - { - "model": "Walkman", - "year": 2000, - "type": "phone", - "specs": map[string]any{ - "CPU": 1.8, - "Chip": "Cortex-A53 ", - "RAM": 8, - "Storage": 256, - "OS": "Android 11", - }, + "devices": []map[string]any{ + { + "model": "Walkman", + "year": 2000, + "type": "phone", + "specs": map[string]any{ + "CPU": 1.8, + "Chip": "Cortex-A53 ", + "RAM": 8, + "Storage": 256, + "OS": "Android 11", }, }, }, diff --git a/tests/integration/index/query_performance_test.go b/tests/integration/index/query_performance_test.go index eec8a13f4b..27de725724 100644 --- a/tests/integration/index/query_performance_test.go +++ b/tests/integration/index/query_performance_test.go @@ -11,51 +11,43 @@ package index import ( - "fmt" "testing" + "github.com/sourcenetwork/defradb/tests/gen" testUtils 
"github.com/sourcenetwork/defradb/tests/integration" ) -func generateDocsForCollection(colIndex, count int) []any { - result := make([]any, 0, count) - for i := 0; i < count; i++ { - result = append(result, testUtils.CreateDoc{ - CollectionID: colIndex, - Doc: fmt.Sprintf(`{ - "name": "name-%d", - "age": %d, - "email": "email%d@gmail.com" - }`, i, i%100, i), - }) - } - return result -} - func TestQueryPerformance_Simple(t *testing.T) { const benchReps = 10 - const numDocs = 500 + + getOptions := func(col string) []gen.Option { + return []gen.Option{ + gen.WithTypeDemand(col, 500), + gen.WithFieldRange(col, "age", 0, 99), + } + } test1 := testUtils.TestCase{ Actions: []any{ - testUtils.SchemaUpdate{Schema: ` - type User { - name: String - age: Int - email: String - } - `}, testUtils.SchemaUpdate{ Schema: ` - type IndexedUser { - name: String - age: Int @index - email: String - } - `, + type User { + name: String + age: Int + email: String + }`, + }, + testUtils.SchemaUpdate{ + Schema: ` + type IndexedUser { + name: String + age: Int @index + email: String + }`, + }, + testUtils.GenerateDocs{ + Options: append(getOptions("User"), getOptions("IndexedUser")...), }, - generateDocsForCollection(0, numDocs), - generateDocsForCollection(1, numDocs), testUtils.Benchmark{ Reps: benchReps, BaseCase: testUtils.Request{Request: ` @@ -77,7 +69,7 @@ func TestQueryPerformance_Simple(t *testing.T) { }`, }, FocusClients: []testUtils.ClientType{testUtils.GoClientType}, - Factor: 5, + Factor: 2, }, }, } diff --git a/tests/integration/index/query_with_index_combined_filter_test.go b/tests/integration/index/query_with_index_combined_filter_test.go index e5673d1ccf..8faf5fa37a 100644 --- a/tests/integration/index/query_with_index_combined_filter_test.go +++ b/tests/integration/index/query_with_index_combined_filter_test.go @@ -28,12 +28,16 @@ func TestQueryWithIndex_IfIndexFilterWithRegular_ShouldFilter(t *testing.T) { test := testUtils.TestCase{ Description: "Combination of a filter on regular and of an indexed field", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -63,13 +67,17 @@ func TestQueryWithIndex_IfMultipleIndexFiltersWithRegular_ShouldFilter(t *testin test := testUtils.TestCase{ Description: "Combination of a filter on regular and of 2 indexed fields", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int @index - email: String - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int @index + email: String + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ diff --git a/tests/integration/index/query_with_index_only_filter_test.go b/tests/integration/index/query_with_index_only_filter_test.go index 098163b307..82779c5832 100644 --- a/tests/integration/index/query_with_index_only_filter_test.go +++ b/tests/integration/index/query_with_index_only_filter_test.go @@ -26,12 +26,16 @@ func TestQueryWithIndex_WithNonIndexedFields_ShouldFetchAllOfThem(t *testing.T) test := testUtils.TestCase{ Description: "If there are non-indexed fields in the query, they should be fetched", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int - } - `), + 
testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{{ @@ -58,11 +62,15 @@ func TestQueryWithIndex_WithEqualFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _eq filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -88,12 +96,16 @@ func TestQueryWithIndex_IfSeveralDocsWithEqFilter_ShouldFetchAll(t *testing.T) { test := testUtils.TestCase{ Description: "If there are several docs matching _eq filter, they should be fetched", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.CreateDoc{ CollectionID: 0, Doc: `{ @@ -127,12 +139,16 @@ func TestQueryWithIndex_WithGreaterThanFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _gt filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -158,12 +174,16 @@ func TestQueryWithIndex_WithGreaterOrEqualFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _ge filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -190,12 +210,16 @@ func TestQueryWithIndex_WithLessThanFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _lt filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -221,12 +245,16 @@ func TestQueryWithIndex_WithLessOrEqualFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _le filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -253,12 +281,16 @@ func TestQueryWithIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _ne filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int + }`, + }, + 
testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -292,12 +324,16 @@ func TestQueryWithIndex_WithInFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _in filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -324,12 +360,16 @@ func TestQueryWithIndex_IfSeveralDocsWithInFilter_ShouldFetchAll(t *testing.T) { test := testUtils.TestCase{ Description: "If there are several docs matching _in filter, they should be fetched", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.CreateDoc{ CollectionID: 0, Doc: `{ @@ -363,12 +403,16 @@ func TestQueryWithIndex_WithNotInFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _nin filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ @@ -422,12 +466,16 @@ func TestQueryWithIndex_WithLikeFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _like filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - email: String @index - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + email: String @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req1, Results: []map[string]any{ @@ -505,12 +553,16 @@ func TestQueryWithIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) { test := testUtils.TestCase{ Description: "Test index filtering with _nlike filter", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String @index - age: Int - } - `), + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req, Results: []map[string]any{ diff --git a/tests/integration/index/query_with_relation_filter_test.go b/tests/integration/index/query_with_relation_filter_test.go index 4a217e931c..57a43bf69e 100644 --- a/tests/integration/index/query_with_relation_filter_test.go +++ b/tests/integration/index/query_with_relation_filter_test.go @@ -16,6 +16,68 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) +func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilter2(t *testing.T) { + req1 := `query { + User(filter: { + devices: {model: {_eq: "MacBook Pro"}} + }) { + name + } + }` + req2 := `query { + User(filter: { + devices: {model: {_eq: "iPhone 10"}} + }) { + name + } + }` + test := testUtils.TestCase{ + Description: "Filter on indexed relation field in 1-N relation", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + devices: [Device] + } + + type Device { + model: String @index + 
owner: User + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req1, + Results: []map[string]any{ + {"name": "Islam"}, + {"name": "Shahzad"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req1), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(6).WithFieldFetches(9).WithIndexFetches(3), + }, + testUtils.Request{ + Request: req2, + Results: []map[string]any{ + {"name": "Addo"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req2), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(3).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilter(t *testing.T) { req1 := `query { User(filter: { @@ -34,18 +96,22 @@ func TestQueryWithIndexOnOneToManyRelation_IfFilterOnIndexedRelation_ShouldFilte test := testUtils.TestCase{ Description: "Filter on indexed relation field in 1-N relation", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int - devices: [Device] - } + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + devices: [Device] + } - type Device { - model: String @index - owner: User - } - `), + type Device { + model: String @index + owner: User + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req1, Results: []map[string]any{ @@ -92,18 +158,22 @@ func TestQueryWithIndexOnOneToOnesSecondaryRelation_IfFilterOnIndexedRelation_Sh test := testUtils.TestCase{ Description: "Filter on indexed secondary relation field in 1-1 relation", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int - address: Address - } + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + address: Address + } - type Address { - user: User - city: String @index - } - `), + type Address { + user: User + city: String @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req1, Results: []map[string]any{ @@ -150,19 +220,23 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedFieldOfRelatio test := testUtils.TestCase{ Description: "Filter on indexed field of primary relation in 1-1 relation", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int - address: Address @primary - } + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + address: Address @primary + } - type Address { - user: User - city: String @index - street: String - } - `), + type Address { + user: User + city: String @index + street: String + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req1, Results: []map[string]any{ @@ -202,19 +276,23 @@ func TestQueryWithIndexOnOneToOnePrimaryRelation_IfFilterOnIndexedRelationWhileI test := testUtils.TestCase{ Description: "Filter on indexed field of primary relation while having indexed foreign field in 1-1 relation", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int - address: Address @primary @index - } + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + address: Address @primary @index + } - type Address { - user: User - city: String @index - street: String - } - `), + type Address { + user: User + city: String @index + street: String + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, 
testUtils.Request{ Request: req, Results: []map[string]any{ @@ -255,24 +333,28 @@ func TestQueryWithIndexOnOneToTwoRelation_IfFilterOnIndexedRelation_ShouldFilter test := testUtils.TestCase{ Description: "Filter on indexed relation field in 1-1 and 1-N relations", Actions: []any{ - createSchemaWithDocs(` - type User { - name: String - age: Int - address: Address - devices: [Device] - } + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + address: Address + devices: [Device] + } - type Device { - model: String @index - owner: User - } + type Device { + model: String @index + owner: User + } - type Address { - user: User - city: String @index - } - `), + type Address { + user: User + city: String @index + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, testUtils.Request{ Request: req1, Results: []map[string]any{ diff --git a/tests/integration/index/utils.go b/tests/integration/index/utils.go deleted file mode 100644 index bb6cb89f14..0000000000 --- a/tests/integration/index/utils.go +++ /dev/null @@ -1,290 +0,0 @@ -// Copyright 2023 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package index - -import ( - "fmt" - "strings" - - "github.com/sourcenetwork/immutable" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/client/request" - testUtils "github.com/sourcenetwork/defradb/tests/integration" -) - -// createSchemaWithDocs returns UpdateSchema action and CreateDoc actions -// with the documents that match the schema. -// The schema is parsed to get the list of properties, and the docs -// are created with the same properties. -// This allows us to have only one large list of docs with predefined -// properties, and create schemas with different properties from it. -func createSchemaWithDocs(schema string) []any { - userDocs := getUserDocs() - resultActions := make([]any, 0, len(userDocs.docs)+1) - resultActions = append(resultActions, testUtils.SchemaUpdate{Schema: schema}) - parser := schemaParser{} - typeDefs := parser.Parse(schema) - generator := createDocGenerator{types: typeDefs} - for _, doc := range userDocs.docs { - actions := generator.GenerateDocs(doc, userDocs.colName) - resultActions = append(resultActions, actions...) 
- } - return resultActions -} - -type createDocGenerator struct { - types map[string]typeDefinition -} - -func createDocJSON(doc map[string]any, typeDef *typeDefinition) string { - sb := strings.Builder{} - for propName := range doc { - format := `"%s": %v` - if _, isStr := doc[propName].(string); isStr { - format = `"%s": "%v"` - } - if sb.Len() == 0 { - sb.WriteString("{\n") - } else { - sb.WriteString(",\n") - } - sb.WriteString(fmt.Sprintf(format, propName, doc[propName])) - } - sb.WriteString("\n}") - return sb.String() -} - -func toRequestedDoc(doc map[string]any, typeDef *typeDefinition) map[string]any { - result := make(map[string]any) - for _, prop := range typeDef.props { - if prop.isRelation { - continue - } - result[prop.name] = doc[prop.name] - } - for name, val := range doc { - if strings.HasSuffix(name, request.RelatedObjectID) { - result[name] = val - } - } - return result -} - -func (this *createDocGenerator) generatePrimary( - doc map[string]any, - typeDef *typeDefinition, -) (map[string]any, []any) { - result := []any{} - requested := toRequestedDoc(doc, typeDef) - for _, prop := range typeDef.props { - if prop.isRelation { - if _, hasProp := doc[prop.name]; hasProp { - if prop.isPrimary.Value() { - subType := this.types[prop.typeStr] - subDoc := toRequestedDoc(doc[prop.name].(map[string]any), &subType) - jsonSubDoc := createDocJSON(subDoc, &subType) - clientSubDoc, err := client.NewDocFromJSON([]byte(jsonSubDoc)) - if err != nil { - panic("Failed to create doc from JSON: " + err.Error()) - } - requested[prop.name+request.RelatedObjectID] = clientSubDoc.Key().String() - result = append(result, testUtils.CreateDoc{CollectionID: subType.index, Doc: jsonSubDoc}) - } - } - } - } - return requested, result -} - -func (this *createDocGenerator) GenerateDocs(doc map[string]any, typeName string) []any { - typeDef := this.types[typeName] - - requested, result := this.generatePrimary(doc, &typeDef) - docStr := createDocJSON(requested, &typeDef) - - result = append(result, testUtils.CreateDoc{CollectionID: typeDef.index, Doc: docStr}) - - var docKey string - for _, prop := range typeDef.props { - if prop.isRelation { - if _, hasProp := doc[prop.name]; hasProp { - if !prop.isPrimary.Value() { - if docKey == "" { - clientDoc, err := client.NewDocFromJSON([]byte(docStr)) - if err != nil { - panic("Failed to create doc from JSON: " + err.Error()) - } - docKey = clientDoc.Key().String() - } - actions := this.generateSecondaryDocs(doc, typeName, &prop, docKey) - result = append(result, actions...) - } - } - } - } - return result -} - -func (this *createDocGenerator) generateSecondaryDocs( - primaryDoc map[string]any, - primaryTypeName string, - relProp *propDefinition, - primaryDocKey string, -) []any { - result := []any{} - relTypeDef := this.types[relProp.typeStr] - primaryPropName := "" - for _, relDocProp := range relTypeDef.props { - if relDocProp.typeStr == primaryTypeName && relDocProp.isPrimary.Value() { - primaryPropName = relDocProp.name + request.RelatedObjectID - switch relVal := primaryDoc[relProp.name].(type) { - case docsCollection: - for _, relDoc := range relVal.docs { - relDoc[primaryPropName] = primaryDocKey - actions := this.GenerateDocs(relDoc, relTypeDef.name) - result = append(result, actions...) - } - case map[string]any: - relVal[primaryPropName] = primaryDocKey - actions := this.GenerateDocs(relVal, relTypeDef.name) - result = append(result, actions...) 
- } - } - } - return result -} - -type propDefinition struct { - name string - typeStr string - isArray bool - isRelation bool - isPrimary immutable.Option[bool] -} - -type typeDefinition struct { - name string - index int - props map[string]propDefinition -} - -type schemaParser struct { - types map[string]typeDefinition - schemaLines []string - firstRelationType string - currentTypeDef typeDefinition - relationTypesMap map[string]map[string]string -} - -func (p *schemaParser) Parse(schema string) map[string]typeDefinition { - p.types = make(map[string]typeDefinition) - p.relationTypesMap = make(map[string]map[string]string) - p.schemaLines = strings.Split(schema, "\n") - p.findTypes() - - for _, line := range p.schemaLines { - line = strings.TrimSpace(line) - if strings.HasPrefix(line, "type ") { - typeNameEndPos := strings.Index(line[5:], " ") - typeName := strings.TrimSpace(line[5 : 5+typeNameEndPos]) - p.currentTypeDef = p.types[typeName] - continue - } - if strings.HasPrefix(line, "}") { - p.types[p.currentTypeDef.name] = p.currentTypeDef - continue - } - pos := strings.Index(line, ":") - if pos != -1 { - p.defineProp(line, pos) - } - } - p.resolvePrimaryRelations() - return p.types -} - -func (p *schemaParser) findTypes() { - typeIndex := 0 - for _, line := range p.schemaLines { - line = strings.TrimSpace(line) - if strings.HasPrefix(line, "type ") { - typeNameEndPos := strings.Index(line[5:], " ") - typeName := strings.TrimSpace(line[5 : 5+typeNameEndPos]) - p.types[typeName] = typeDefinition{name: typeName, index: typeIndex, props: make(map[string]propDefinition)} - typeIndex++ - } - } -} - -func (p *schemaParser) defineProp(line string, pos int) { - prop := propDefinition{name: line[:pos]} - prop.typeStr = strings.TrimSpace(line[pos+1:]) - typeEndPos := strings.Index(prop.typeStr, " ") - if typeEndPos != -1 { - prop.typeStr = prop.typeStr[:typeEndPos] - } - if prop.typeStr[0] == '[' { - prop.isArray = true - prop.typeStr = prop.typeStr[1 : len(prop.typeStr)-1] - } - if _, isRelation := p.types[prop.typeStr]; isRelation { - prop.isRelation = true - if prop.isArray { - prop.isPrimary = immutable.Some(false) - } else if strings.Contains(line[pos+len(prop.typeStr)+2:], "@primary") { - prop.isPrimary = immutable.Some(true) - } - relMap := p.relationTypesMap[prop.typeStr] - if relMap == nil { - relMap = make(map[string]string) - } - relMap[prop.name] = p.currentTypeDef.name - p.relationTypesMap[prop.typeStr] = relMap - if p.firstRelationType == "" { - p.firstRelationType = p.currentTypeDef.name - } - } - p.currentTypeDef.props[prop.name] = prop -} - -func (p *schemaParser) resolvePrimaryRelations() { - for typeName, relationProps := range p.relationTypesMap { - typeDef := p.types[typeName] - for _, prop := range typeDef.props { - for relPropName, relPropType := range relationProps { - if prop.typeStr == relPropType { - relatedTypeDef := p.types[relPropType] - relatedProp := relatedTypeDef.props[relPropName] - if !relatedProp.isPrimary.HasValue() { - relatedProp.isPrimary = immutable.Some(typeName == p.firstRelationType) - relatedTypeDef.props[relPropName] = relatedProp - p.types[relPropType] = relatedTypeDef - delete(p.relationTypesMap, relPropType) - } - if !prop.isPrimary.HasValue() { - val := typeName != p.firstRelationType - if relatedProp.isPrimary.HasValue() { - val = !relatedProp.isPrimary.Value() - } - prop.isPrimary = immutable.Some(val) - typeDef.props[prop.name] = prop - } - } - } - } - p.types[typeName] = typeDef - } -} - -func makeExplainQuery(req string) string { - return 
"query @explain(type: execute) " + req[6:] -} diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 112a497dc8..6ebe6242b3 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -17,6 +17,8 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/tests/gen" + "github.com/sourcenetwork/defradb/tests/predefined" ) // TestCase contains the details of the test case to execute. @@ -338,6 +340,32 @@ type Request struct { ExpectedError string } +// GenerateDocs is an action that will trigger generation of documents. +type GenerateDocs struct { + // NodeID may hold the ID (index) of a node to execute the generation on. + // + // If a value is not provided the docs generation will be executed against all nodes, + NodeID immutable.Option[int] + + // Options to be passed to the auto doc generator. + Options []gen.Option + + // The list of collection names to generate docs for. + // If not provided, docs will be generated for all collections. + ForCollections []string +} + +// CreatePredefinedDocs is an action that will trigger creation of predefined documents. +type CreatePredefinedDocs struct { + // NodeID may hold the ID (index) of a node to execute the generation on. + // + // If a value is not provided the docs generation will be executed against all nodes, + NodeID immutable.Option[int] + + // The list of documents to replicate. + Docs predefined.DocsList +} + // TransactionCommit represents a commit request for a transaction of the given id. type TransactionCommit struct { // Used to identify the transaction to commit. diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index a9480c15ec..d414cc1ca4 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -20,6 +20,7 @@ import ( "testing" "time" + "github.com/bxcodec/faker/support/slice" "github.com/libp2p/go-libp2p/core/crypto" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" @@ -33,6 +34,8 @@ import ( "github.com/sourcenetwork/defradb/net" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" "github.com/sourcenetwork/defradb/tests/clients" + "github.com/sourcenetwork/defradb/tests/gen" + "github.com/sourcenetwork/defradb/tests/predefined" ) const mutationTypeEnvName = "DEFRA_MUTATION_TYPE" @@ -314,6 +317,12 @@ func performAction( case Benchmark: benchmarkAction(s, actionIndex, action) + case GenerateDocs: + generateDocs(s, action) + + case CreatePredefinedDocs: + generatePredefinedDocs(s, action) + case SetupComplete: // no-op, just continue. 
@@ -322,6 +331,48 @@ func performAction( } } +func createGenerateDocs(s *state, docs []gen.GeneratedDoc, nodeID immutable.Option[int]) { + nameToInd := make(map[string]int) + for i, name := range s.collectionNames { + nameToInd[name] = i + } + for _, doc := range docs { + docJSON, err := doc.Doc.String() + if err != nil { + s.t.Fatalf("Failed to generate docs %s", err) + } + createDoc(s, CreateDoc{CollectionID: nameToInd[doc.Col.Description.Name], Doc: docJSON, NodeID: nodeID}) + } +} + +func generateDocs(s *state, action GenerateDocs) { + collections := getNodeCollections(action.NodeID, s.collections) + defs := make([]client.CollectionDefinition, 0, len(collections[0])) + for _, col := range collections[0] { + if len(action.ForCollections) == 0 || slice.Contains(action.ForCollections, col.Name()) { + defs = append(defs, col.Definition()) + } + } + docs, err := gen.AutoGenerate(defs, action.Options...) + if err != nil { + s.t.Fatalf("Failed to generate docs %s", err) + } + createGenerateDocs(s, docs, action.NodeID) +} + +func generatePredefinedDocs(s *state, action CreatePredefinedDocs) { + collections := getNodeCollections(action.NodeID, s.collections) + defs := make([]client.CollectionDefinition, 0, len(collections[0])) + for _, col := range collections[0] { + defs = append(defs, col.Definition()) + } + docs, err := predefined.Create(defs, action.Docs) + if err != nil { + s.t.Fatalf("Failed to generate docs %s", err) + } + createGenerateDocs(s, docs, action.NodeID) +} + func benchmarkAction( s *state, actionIndex int, @@ -386,27 +437,7 @@ func getCollectionNames(testCase TestCase) []string { continue } - // WARNING: This will not work with schemas ending in `type`, e.g. `user_type` - splitByType := strings.Split(action.Schema, "type ") - // Skip the first, as that preceeds `type ` if `type ` is present, - // else there are no types. - for i := 1; i < len(splitByType); i++ { - wipSplit := strings.TrimLeft(splitByType[i], " ") - indexOfLastChar := strings.IndexAny(wipSplit, " {") - if indexOfLastChar <= 0 { - // This should never happen - continue - } - - collectionName := wipSplit[:indexOfLastChar] - if _, ok := collectionIndexByName[collectionName]; ok { - // Collection name has already been added, possibly via another node - continue - } - - collectionIndexByName[collectionName] = nextIndex - nextIndex++ - } + nextIndex = getCollectionNamesFromSchema(collectionIndexByName, action.Schema, nextIndex) } } @@ -418,6 +449,31 @@ func getCollectionNames(testCase TestCase) []string { return collectionNames } +func getCollectionNamesFromSchema(result map[string]int, schema string, nextIndex int) int { + // WARNING: This will not work with schemas ending in `type`, e.g. `user_type` + splitByType := strings.Split(schema, "type ") + // Skip the first, as that preceeds `type ` if `type ` is present, + // else there are no types. + for i := 1; i < len(splitByType); i++ { + wipSplit := strings.TrimLeft(splitByType[i], " ") + indexOfLastChar := strings.IndexAny(wipSplit, " {") + if indexOfLastChar <= 0 { + // This should never happen + continue + } + + collectionName := wipSplit[:indexOfLastChar] + if _, ok := result[collectionName]; ok { + // Collection name has already been added, possibly via another node + continue + } + + result[collectionName] = nextIndex + nextIndex++ + } + return nextIndex +} + // closeNodes closes all the given nodes, ensuring that resources are properly released. 
 func closeNodes(
 	s *state,
diff --git a/tests/predefined/README.md b/tests/predefined/README.md
new file mode 100644
index 0000000000..d70cafab4f
--- /dev/null
+++ b/tests/predefined/README.md
@@ -0,0 +1,56 @@
+# Creation of Predefined Documents
+
+`Create` and `CreateFromSDL` can be used to generate predefined documents.
+
+They accept a predefined list of documents, `DocsList`, which in turn might include nested documents.
+
+The fields in `DocsList` might be a superset of the fields in the schema.
+In that case, only the fields in the schema will be considered.
+
+For example, for the following schema:
+```graphql
+type User {
+    name: String
+    devices: [Device]
+}
+
+type Device {
+    model: String
+    owner: User
+}
+```
+if the `DocsList` is as follows:
+```go
+predefined.DocsList{
+    ColName: "User",
+    Docs: []map[string]any{
+        {
+            "name": "Shahzad",
+            "age": 20,
+            "verified": false,
+            "email": "shahzad@gmail.com",
+            "devices": []map[string]any{
+                {
+                    "model": "iPhone Xs",
+                    "year": 2022,
+                    "type": "phone",
+                }},
+        }},
+}
+```
+only the following doc will be considered:
+```go
+predefined.DocsList{
+    ColName: "User",
+    Docs: []map[string]any{
+        {
+            "name": "Shahzad",
+            "devices": []map[string]any{
+                {
+                    "model": "iPhone Xs",
+                }},
+        }},
+}
+```
+This allows having a large predefined list of documents (and sub-documents) and using only a subset of their fields for a particular test case.
\ No newline at end of file
diff --git a/tests/predefined/defs.go b/tests/predefined/defs.go
new file mode 100644
index 0000000000..325b318dda
--- /dev/null
+++ b/tests/predefined/defs.go
@@ -0,0 +1,34 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package predefined
+
+// DocsList is a list of document structures that might nest other documents to be replicated
+// by a document generator.
+//
+//	predefined.DocsList{
+//		ColName: "User",
+//		Docs: []map[string]any{
+//			{
+//				"name": "Shahzad",
+//				"age": 20,
+//				"devices": []map[string]any{
+//					{
+//						"model": "iPhone Xs",
+//					},
+//				},
+//			},
+//		},
+//	}
+type DocsList struct {
+	// ColName is the name of the collection that the documents in Docs belong to.
+	ColName string
+	// Docs is a list of documents to be replicated.
+	Docs []map[string]any
+}
diff --git a/tests/predefined/errors.go b/tests/predefined/errors.go
new file mode 100644
index 0000000000..f4ca175aeb
--- /dev/null
+++ b/tests/predefined/errors.go
@@ -0,0 +1,21 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
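+
+// The helpers below follow the error convention used elsewhere in the repo: a
+// message constant paired with a constructor that wraps the inner error, so
+// callers can both match on and inspect the cause of a generation failure.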
+
+package predefined
+
+import "github.com/sourcenetwork/defradb/errors"
+
+const (
+	errFailedToGenerateDoc string = "failed to generate doc"
+)
+
+func NewErrFailedToGenerateDoc(inner error) error {
+	return errors.Wrap(errFailedToGenerateDoc, inner)
+}
diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go
new file mode 100644
index 0000000000..4adce90805
--- /dev/null
+++ b/tests/predefined/gen_predefined.go
@@ -0,0 +1,251 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package predefined
+
+import (
+	"context"
+	"strings"
+
+	"github.com/sourcenetwork/defradb/client"
+	"github.com/sourcenetwork/defradb/client/request"
+	"github.com/sourcenetwork/defradb/request/graphql"
+	"github.com/sourcenetwork/defradb/tests/gen"
+)
+
+func parseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) {
+	parser, err := graphql.NewParser()
+	if err != nil {
+		return nil, err
+	}
+	cols, err := parser.ParseSDL(context.Background(), gqlSDL)
+	if err != nil {
+		return nil, err
+	}
+	result := make(map[string]client.CollectionDefinition)
+	for _, col := range cols {
+		result[col.Description.Name] = col
+	}
+	return result, nil
+}
+
+// CreateFromSDL generates documents for a GraphQL SDL from a predefined list
+// of docs that might include nested docs.
+// The SDL is parsed to get the list of fields, and the docs
+// are created with the fields parsed from the SDL.
+// This allows us to have only one large list of docs with predefined
+// fields, and create SDLs with different fields from it.
+func CreateFromSDL(gqlSDL string, docsList DocsList) ([]gen.GeneratedDoc, error) {
+	resultDocs := make([]gen.GeneratedDoc, 0, len(docsList.Docs))
+	typeDefs, err := parseSDL(gqlSDL)
+	if err != nil {
+		return nil, err
+	}
+	generator := docGenerator{types: typeDefs}
+	for _, doc := range docsList.Docs {
+		docs, err := generator.generateRelatedDocs(doc, docsList.ColName)
+		if err != nil {
+			return nil, err
+		}
+		resultDocs = append(resultDocs, docs...)
+	}
+	return resultDocs, nil
+}
+
+// Create generates documents from a predefined list
+// of docs that might include nested docs.
+//
+// For example it can be used to generate docs from this list:
+//
+//	predefined.DocsList{
+//		ColName: "User",
+//		Docs: []map[string]any{
+//			{
+//				"name": "Shahzad",
+//				"age": 20,
+//				"devices": []map[string]any{
+//					{
+//						"model": "iPhone Xs",
+//					},
+//				},
+//			},
+//		},
+//	...
+//
+// It will generate documents for the `User` collection, replicating the given structure,
+// i.e. creating devices as related secondary documents.
+func Create(defs []client.CollectionDefinition, docsList DocsList) ([]gen.GeneratedDoc, error) {
+	resultDocs := make([]gen.GeneratedDoc, 0, len(docsList.Docs))
+	typeDefs := make(map[string]client.CollectionDefinition)
+	for _, col := range defs {
+		typeDefs[col.Description.Name] = col
+	}
+	generator := docGenerator{types: typeDefs}
+	for _, doc := range docsList.Docs {
+		docs, err := generator.generateRelatedDocs(doc, docsList.ColName)
+		if err != nil {
+			return nil, err
+		}
+		resultDocs = append(resultDocs, docs...)
+	}
+	return resultDocs, nil
+}
+
+type docGenerator struct {
+	types map[string]client.CollectionDefinition
+}
+
+// toRequestedDoc removes the fields that are not in the schema of the collection.
+//
+// This is typically called on user/test-provided seed documents to remove any non-existent
+// fields before generating documents from them.
+// It does not modify the original doc.
+func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) map[string]any {
+	result := make(map[string]any)
+	for _, field := range typeDef.Schema.Fields {
+		if field.IsRelation() || field.Name == request.KeyFieldName {
+			continue
+		}
+		result[field.Name] = doc[field.Name]
+	}
+	for name, val := range doc {
+		if strings.HasSuffix(name, request.RelatedObjectID) {
+			result[name] = val
+		}
+	}
+	return result
+}
+
+// generatePrimary generates primary docs for the given secondary doc and adds foreign keys
+// to the secondary doc to reference the primary docs.
+func (this *docGenerator) generatePrimary(
+	secDocMap map[string]any,
+	secType *client.CollectionDefinition,
+) (map[string]any, []gen.GeneratedDoc, error) {
+	result := []gen.GeneratedDoc{}
+	requestedSecondary := toRequestedDoc(secDocMap, secType)
+	for _, secDocField := range secType.Schema.Fields {
+		if secDocField.IsRelation() {
+			if secDocMapField, hasField := secDocMap[secDocField.Name]; hasField {
+				if secDocField.IsPrimaryRelation() {
+					primType := this.types[secDocField.Schema]
+					primDocMap, subResult, err := this.generatePrimary(
+						secDocMap[secDocField.Name].(map[string]any), &primType)
+					if err != nil {
+						return nil, nil, NewErrFailedToGenerateDoc(err)
+					}
+					primDoc, err := client.NewDocFromMap(primDocMap)
+					if err != nil {
+						return nil, nil, NewErrFailedToGenerateDoc(err)
+					}
+					docKey := primDoc.Key().String()
+					requestedSecondary[secDocField.Name+request.RelatedObjectID] = docKey
+					subResult = append(subResult, gen.GeneratedDoc{Col: &primType, Doc: primDoc})
+					result = append(result, subResult...)
+
+					secondaryDocs, err := this.generateSecondaryDocs(
+						secDocMapField.(map[string]any), docKey, &primType, secType.Description.Name)
+					if err != nil {
+						return nil, nil, err
+					}
+					result = append(result, secondaryDocs...)
+				}
+			}
+		}
+	}
+	return requestedSecondary, result, nil
+}
+
+// generateRelatedDocs generates related docs (primary and secondary) for the given doc and
+// adds foreign keys to the given doc to reference the primary docs.
+func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName string) ([]gen.GeneratedDoc, error) {
+	typeDef := this.types[typeName]
+
+	// First create the primary docs and link them to the given doc, so that the docKey
+	// of the complete document can be determined.
+	requested, result, err := this.generatePrimary(docMap, &typeDef)
+	if err != nil {
+		return nil, err
+	}
+	doc, err := client.NewDocFromMap(requested)
+	if err != nil {
+		return nil, NewErrFailedToGenerateDoc(err)
+	}
+
+	result = append(result, gen.GeneratedDoc{Col: &typeDef, Doc: doc})
+
+	secondaryDocs, err := this.generateSecondaryDocs(docMap, doc.Key().String(), &typeDef, "")
+	if err != nil {
+		return nil, err
+	}
+	result = append(result, secondaryDocs...)
+ return result, nil +} + +func (this *docGenerator) generateSecondaryDocs( + primaryDocMap map[string]any, + docKey string, + primaryType *client.CollectionDefinition, + parentTypeName string, +) ([]gen.GeneratedDoc, error) { + result := []gen.GeneratedDoc{} + for _, field := range primaryType.Schema.Fields { + if field.IsRelation() { + if _, hasProp := primaryDocMap[field.Name]; hasProp { + if !field.IsPrimaryRelation() && + (parentTypeName == "" || parentTypeName != field.Schema) { + docs, err := this.generateSecondaryDocsForField( + primaryDocMap, primaryType.Description.Name, &field, docKey) + if err != nil { + return nil, err + } + result = append(result, docs...) + } + } + } + } + return result, nil +} + +// generateSecondaryDocsForField generates secondary docs for the given field of a primary doc. +func (this *docGenerator) generateSecondaryDocsForField( + primaryDoc map[string]any, + primaryTypeName string, + relField *client.FieldDescription, + primaryDocKey string, +) ([]gen.GeneratedDoc, error) { + result := []gen.GeneratedDoc{} + relTypeDef := this.types[relField.Schema] + primaryPropName := "" + for _, relDocField := range relTypeDef.Schema.Fields { + if relDocField.Schema == primaryTypeName && relDocField.IsPrimaryRelation() { + primaryPropName = relDocField.Name + request.RelatedObjectID + switch relVal := primaryDoc[relField.Name].(type) { + case []map[string]any: + for _, relDoc := range relVal { + relDoc[primaryPropName] = primaryDocKey + actions, err := this.generateRelatedDocs(relDoc, relTypeDef.Description.Name) + if err != nil { + return nil, err + } + result = append(result, actions...) + } + case map[string]any: + relVal[primaryPropName] = primaryDocKey + actions, err := this.generateRelatedDocs(relVal, relTypeDef.Description.Name) + if err != nil { + return nil, err + } + result = append(result, actions...) + } + } + } + return result, nil +} diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go new file mode 100644 index 0000000000..1092280015 --- /dev/null +++ b/tests/predefined/gen_predefined_test.go @@ -0,0 +1,508 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package predefined + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" +) + +func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { + schema := ` + type User { + name: String + age: Int + }` + + docsList := DocsList{ + ColName: "User", + Docs: []map[string]any{ + {"name": "John", "age": 30}, + {"name": "Fred", "age": 25}, + }, + } + docs, err := CreateFromSDL(schema, docsList) + assert.NoError(t, err) + + errorMsg := assertDocs(mustAddKeysToDocs(docsList.Docs), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) { + schema := ` + type User { + name: String + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + {"name": "John", "age": 30}, + {"name": "Fred", "age": 25}, + }, + }) + assert.NoError(t, err) + + errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + {"name": "John"}, + {"name": "Fred"}, + }), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { + schema := ` + type User { + name: String + device: Device + } + type Device { + model: String + owner: User + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "device": map[string]any{ + "model": "iPhone", + }, + }, + { + "name": "Fred", + "device": map[string]any{ + "model": "MacBook", + }, + }, + }, + }) + assert.NoError(t, err) + + errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + {"name": "John"}, + {"name": "Fred"}, + {"model": "iPhone", "owner_id": mustGetDocKeyFromDocMap(map[string]any{"name": "John"})}, + {"model": "MacBook", "owner_id": mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"})}, + }), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) { + schema := ` + type User { + name: String + device: Device @primary + } + type Device { + model: String + owner: User + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "device": map[string]any{ + "model": "iPhone", + }, + }, + { + "name": "Fred", + "device": map[string]any{ + "model": "MacBook", + }, + }, + }, + }) + assert.NoError(t, err) + + errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + {"name": "John", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "iPhone"})}, + {"name": "Fred", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "MacBook"})}, + {"model": "iPhone"}, + {"model": "MacBook"}, + }), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { + schema := ` + type User { + name: String + device: Device @primary + } + type Device { + model: String + owner: User + specs: Specs @primary + } + type Specs { + OS: String + device: Device + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "device": map[string]any{ + "model": "iPhone", + "specs": map[string]any{ + "OS": "iOS", + }, + }, + }, + }, + }) + assert.NoError(t, err) + + specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS"}) + deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone", "specs_id": specsDoc[request.KeyFieldName]}) + userDoc := mustAddKeyToDoc(map[string]any{"name": "John", 
"device_id": deviceDoc[request.KeyFieldName]}) + + errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { + schema := ` + type User { + name: String + device: Device + } + type Device { + model: String + owner: User @primary + specs: Specs @primary + } + type Specs { + OS: String + device: Device + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "device": map[string]any{ + "model": "iPhone", + "specs": map[string]any{ + "OS": "iOS", + }, + }, + }, + }, + }) + assert.NoError(t, err) + + specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS"}) + userDoc := mustAddKeyToDoc(map[string]any{"name": "John"}) + deviceDoc := mustAddKeyToDoc(map[string]any{ + "model": "iPhone", + "specs_id": specsDoc[request.KeyFieldName], + "owner_id": userDoc[request.KeyFieldName], + }) + + errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { + schema := ` + type User { + name: String + device: Device @primary + } + type Device { + model: String + owner: User + specs: Specs + } + type Specs { + OS: String + device: Device @primary + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "device": map[string]any{ + "model": "iPhone", + "specs": map[string]any{ + "OS": "iOS", + }, + }, + }, + }, + }) + assert.NoError(t, err) + + deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone"}) + specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS", "device_id": deviceDoc[request.KeyFieldName]}) + userDoc := mustAddKeyToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.KeyFieldName]}) + + errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { + schema := ` + type User { + name: String + device: Device @primary + address: Address @primary + } + type Device { + model: String + owner: User + } + type Address { + street: String + user: User + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "device": map[string]any{ + "model": "iPhone", + }, + "address": map[string]any{ + "street": "Backer", + }, + }, + }, + }) + assert.NoError(t, err) + + deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone"}) + addressDoc := mustAddKeyToDoc(map[string]any{"street": "Backer"}) + userDoc := mustAddKeyToDoc(map[string]any{ + "name": "John", + "device_id": deviceDoc[request.KeyFieldName], + "address_id": addressDoc[request.KeyFieldName], + }) + + errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, addressDoc}, docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_OneToMany(t *testing.T) { + schema := ` + type User { + name: String + devices: [Device] + } + type Device { + model: String + owner: User + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "devices": []map[string]any{ + {"model": "iPhone"}, + {"model": "PlayStation"}, + }, + }, + { + "name": "Fred", + "devices": []map[string]any{ + {"model": "Surface"}, + {"model": "Pixel"}, + }, + }, + }, + }) 
+ assert.NoError(t, err) + + johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) + fredDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"}) + errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + {"name": "John"}, + {"name": "Fred"}, + {"model": "iPhone", "owner_id": johnDocKey}, + {"model": "PlayStation", "owner_id": johnDocKey}, + {"model": "Surface", "owner_id": fredDocKey}, + {"model": "Pixel", "owner_id": fredDocKey}, + }), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefinedFromSchema_OneToManyToOne(t *testing.T) { + schema := ` + type User { + name: String + devices: [Device] + } + type Device { + model: String + owner: User + specs: Specs + } + type Specs { + CPU: String + device: Device @primary + }` + + docs, err := CreateFromSDL(schema, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "devices": []map[string]any{ + { + "model": "iPhone", + "specs": map[string]any{ + "CPU": "A13", + }, + }, + { + "model": "MacBook", + "specs": map[string]any{ + "CPU": "M2", + }, + }, + }, + }, + }, + }) + assert.NoError(t, err) + + johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) + errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + {"name": "John"}, + {"model": "iPhone", "owner_id": johnDocKey}, + {"model": "MacBook", "owner_id": johnDocKey}, + {"CPU": "A13", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "iPhone", "owner_id": johnDocKey})}, + {"CPU": "M2", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "MacBook", "owner_id": johnDocKey})}, + }), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} + +func TestGeneratePredefined_OneToMany(t *testing.T) { + defs := []client.CollectionDefinition{ + { + Description: client.CollectionDescription{ + Name: "User", + ID: 0, + }, + Schema: client.SchemaDescription{ + Name: "User", + Fields: []client.FieldDescription{ + { + Name: "name", + Kind: client.FieldKind_STRING, + }, + { + Name: "devices", + Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, + Schema: "Device", + RelationType: client.Relation_Type_MANY | client.Relation_Type_ONEMANY, + }, + }, + }, + }, + { + Description: client.CollectionDescription{ + Name: "Device", + ID: 1, + }, + Schema: client.SchemaDescription{ + Name: "Device", + Fields: []client.FieldDescription{ + { + Name: "model", + Kind: client.FieldKind_STRING, + }, + { + Name: "owner", + Kind: client.FieldKind_FOREIGN_OBJECT, + Schema: "User", + RelationType: client.Relation_Type_ONE | + client.Relation_Type_ONEMANY | + client.Relation_Type_Primary, + }, + }, + }, + }, + } + docs, err := Create(defs, DocsList{ + ColName: "User", + Docs: []map[string]any{ + { + "name": "John", + "devices": []map[string]any{ + {"model": "iPhone"}, + {"model": "PlayStation"}, + }, + }, + { + "name": "Fred", + "devices": []map[string]any{ + {"model": "Surface"}, + {"model": "Pixel"}, + }, + }, + }, + }) + assert.NoError(t, err) + + johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) + fredDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"}) + errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + {"name": "John"}, + {"name": "Fred"}, + {"model": "iPhone", "owner_id": johnDocKey}, + {"model": "PlayStation", "owner_id": johnDocKey}, + {"model": "Surface", "owner_id": fredDocKey}, + {"model": "Pixel", "owner_id": fredDocKey}, + }), docs) + if errorMsg != "" { + t.Error(errorMsg) + } +} diff --git a/tests/predefined/util_test.go b/tests/predefined/util_test.go new 
file mode 100644
index 0000000000..da5d880ba8
--- /dev/null
+++ b/tests/predefined/util_test.go
@@ -0,0 +1,89 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package predefined
+
+import (
+	"fmt"
+
+	"github.com/sourcenetwork/defradb/client"
+	"github.com/sourcenetwork/defradb/client/request"
+	"github.com/sourcenetwork/defradb/tests/gen"
+)
+
+func areValuesEquivalent(a, b any) bool {
+	strA := fmt.Sprintf("%v", a)
+	strB := fmt.Sprintf("%v", b)
+
+	return strA == strB
+}
+
+func areMapsEquivalent(m1, m2 map[string]any) bool {
+	if len(m1) != len(m2) {
+		return false
+	}
+	for k, v := range m1 {
+		if !areValuesEquivalent(v, m2[k]) {
+			return false
+		}
+	}
+	return true
+}
+
+func assertDoc(expected map[string]any, actual gen.GeneratedDoc) string {
+	actualMap, err := actual.Doc.ToMap()
+	if err != nil {
+		return "can not convert doc to map: " + err.Error()
+	}
+	if !areMapsEquivalent(expected, actualMap) {
+		return "docs are not equal"
+	}
+	return ""
+}
+
+// assertDocs asserts that the expected docs are equal to the actual docs, ignoring order.
+func assertDocs(expected []map[string]any, actual []gen.GeneratedDoc) string {
+	if len(expected) != len(actual) {
+		return fmt.Sprintf("expected len %d, got %d", len(expected), len(actual))
+	}
+outer:
+	for i := 0; i < len(expected); i++ {
+		for j := 0; j < len(actual); j++ {
+			errorMsg := assertDoc(expected[i], actual[j])
+			if errorMsg == "" {
+				actual = append(actual[:j], actual[j+1:]...)
+				continue outer
+			}
+		}
+		return fmt.Sprintf("expected doc not found: %v", expected[i])
+	}
+
+	return ""
+}
+
+func mustGetDocKeyFromDocMap(docMap map[string]any) string {
+	doc, err := client.NewDocFromMap(docMap)
+	if err != nil {
+		panic("can not get doc from map: " + err.Error())
+	}
+	return doc.Key().String()
+}
+
+func mustAddKeyToDoc(doc map[string]any) map[string]any {
+	doc[request.KeyFieldName] = mustGetDocKeyFromDocMap(doc)
+	return doc
+}
+
+func mustAddKeysToDocs(docs []map[string]any) []map[string]any {
+	for i := range docs {
+		mustAddKeyToDoc(docs[i])
+	}
+	return docs
+}
From 42cd743d3cb89c3941a88b4b1cbdea7a71c50724 Mon Sep 17 00:00:00 2001
From: AndrewSisley
Date: Mon, 27 Nov 2023 17:54:11 -0500
Subject: [PATCH 07/60] chore: Update Lens to v0.5 (#2083)

## Relevant issue(s)

Resolves #2082

## Description

Update Lens to v0.5. This includes Lens-enumerable and wasmtime v15.

This is a breaking change for any lens modules.
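For module authors, the shape of the migration is roughly the following (a minimal pass-through sketch distilled from the test lenses updated below; it assumes `lens_sdk = "^0.5"` and `serde_json` in `Cargo.toml` and is illustrative, not a drop-in implementation):

```rust
use lens_sdk::StreamOption;
use lens_sdk::option::StreamOption::{Some, None, EndOfStream};

// Input is now pulled from the host via an imported `next` function
// instead of being passed to `transform` as a pointer argument.
#[link(wasm_import_module = "lens")]
extern "C" {
    fn next() -> *mut u8;
}

#[no_mangle]
pub extern fn transform() -> *mut u8 {
    match try_transform() {
        Ok(o) => match o {
            Some(result_json) => lens_sdk::to_mem(lens_sdk::JSON_TYPE_ID, &result_json),
            None => lens_sdk::nil_ptr(),
            // New: the end-of-stream marker must be reported back to the host.
            EndOfStream => lens_sdk::to_mem(lens_sdk::EOS_TYPE_ID, &[]),
        },
        Err(e) => lens_sdk::to_mem(lens_sdk::ERROR_TYPE_ID, &e.to_string().as_bytes()),
    }
}

// Illustrative identity transform: reads one document and returns it unchanged.
fn try_transform() -> Result<StreamOption<Vec<u8>>, Box<dyn std::error::Error>> {
    let ptr = unsafe { next() };
    let input = match lens_sdk::try_from_mem::<serde_json::Value>(ptr)? {
        Some(v) => v,
        None => return Ok(None),
        EndOfStream => return Ok(EndOfStream),
    };
    let result_json = serde_json::to_vec(&input)?;
    // New: the module is responsible for freeing the transport buffer.
    lens_sdk::free_transport_buffer(ptr)?;
    Ok(Some(result_json))
}
```

The important differences are that input is now pulled lazily from the host via the imported `next` function, the new `EndOfStream` variant must be handled and propagated, and transport buffers must be freed by the module.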
--- .../workflows/test-and-upload-coverage.yml | 2 +- go.mod | 4 +-- go.sum | 8 ++--- tests/lenses/rust_wasm32_copy/Cargo.toml | 2 +- tests/lenses/rust_wasm32_copy/src/lib.rs | 19 +++++++++--- tests/lenses/rust_wasm32_remove/Cargo.toml | 2 +- tests/lenses/rust_wasm32_remove/src/lib.rs | 19 +++++++++--- .../lenses/rust_wasm32_set_default/Cargo.toml | 2 +- .../lenses/rust_wasm32_set_default/src/lib.rs | 29 ++++++++++++++----- 9 files changed, 62 insertions(+), 25 deletions(-) diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index 15846256fb..d92ae01cce 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -28,7 +28,7 @@ jobs: name: Run tests matrix job strategy: - fail-fast: true + fail-fast: false matrix: os: [ubuntu-latest] client-type: [go, http, cli] diff --git a/go.mod b/go.mod index 759d70c2a1..523cf9a22c 100644 --- a/go.mod +++ b/go.mod @@ -21,7 +21,7 @@ require ( github.com/ipfs/go-log v1.0.5 github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 - github.com/lens-vm/lens/host-go v0.0.0-20231108161805-be145dd591bb + github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c github.com/libp2p/go-libp2p v0.32.1 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.1 @@ -57,7 +57,7 @@ require ( github.com/Jorropo/jsync v1.0.1 // indirect github.com/benbjohnson/clock v1.3.5 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/bytecodealliance/wasmtime-go/v14 v14.0.0 // indirect + github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/containerd/cgroups v1.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect diff --git a/go.sum b/go.sum index dff217c361..49e011e0ff 100644 --- a/go.sum +++ b/go.sum @@ -64,8 +64,8 @@ github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBT github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= github.com/bxcodec/faker v2.0.1+incompatible/go.mod h1:BNzfpVdTwnFJ6GtfYTcQu6l6rHShT+veBxNCnjCx5XM= -github.com/bytecodealliance/wasmtime-go/v14 v14.0.0 h1:ur7S3P+PAeJmgllhSrKnGQOAmmtUbLQxb/nw2NZiaEM= -github.com/bytecodealliance/wasmtime-go/v14 v14.0.0/go.mod h1:tqOVEUjnXY6aGpSfM9qdVRR6G//Yc513fFYUdzZb/DY= +github.com/bytecodealliance/wasmtime-go/v15 v15.0.0 h1:4R2MpSPPbtSxqdsOTvsMn1pnwdEhzbDGMao6LUUSLv4= +github.com/bytecodealliance/wasmtime-go/v15 v15.0.0/go.mod h1:m6vB/SsM+pnJkVHmO1wzHYUeYtciltTKuxuvkR8pYcY= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -362,8 +362,8 @@ github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/lens-vm/lens/host-go v0.0.0-20231108161805-be145dd591bb h1:e/9Oqk71LAu/qADRkDMgJAvb52CU1Ks27GBsIdaqW1c= -github.com/lens-vm/lens/host-go v0.0.0-20231108161805-be145dd591bb/go.mod 
h1:RNLpJkOAuqT392hxxz0FQiMiEd3pY8eI5s1AUEPAOeE=
+github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c h1:bG+mr4SqbYRU69L6CSvHDsKbRg5Q9vaN2T5g7qcrPdQ=
+github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c/go.mod h1:a4edl+KcOVk1Nj3EjG77htqg2/0Mmy3bSG0kl+FWVqQ=
 github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8=
 github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg=
 github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38yPW7c=
diff --git a/tests/lenses/rust_wasm32_copy/Cargo.toml b/tests/lenses/rust_wasm32_copy/Cargo.toml
index 77b2766d8b..64141ba4cb 100644
--- a/tests/lenses/rust_wasm32_copy/Cargo.toml
+++ b/tests/lenses/rust_wasm32_copy/Cargo.toml
@@ -9,4 +9,4 @@ crate-type = ["cdylib"]
 [dependencies]
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0.87"
-lens_sdk = { version = "0.1.0", git = "https://github.com/lens-vm/lens.git" }
+lens_sdk = "^0.5"
diff --git a/tests/lenses/rust_wasm32_copy/src/lib.rs b/tests/lenses/rust_wasm32_copy/src/lib.rs
index c1084c9a5a..6a2cb06ce5 100644
--- a/tests/lenses/rust_wasm32_copy/src/lib.rs
+++ b/tests/lenses/rust_wasm32_copy/src/lib.rs
@@ -7,6 +7,13 @@ use std::sync::RwLock;
 use std::error::Error;
 use std::{fmt, error};
 use serde::Deserialize;
+use lens_sdk::StreamOption;
+use lens_sdk::option::StreamOption::{Some, None, EndOfStream};
+
+#[link(wasm_import_module = "lens")]
+extern "C" {
+    fn next() -> *mut u8;
+}
 
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
 enum ModuleError {
@@ -32,7 +39,7 @@ pub struct Parameters {
     pub dst: String,
 }
 
-static PARAMETERS: RwLock<Option<Parameters>> = RwLock::new(None);
+static PARAMETERS: RwLock<StreamOption<Parameters>> = RwLock::new(None);
 
 #[no_mangle]
 pub extern fn alloc(size: usize) -> *mut u8 {
@@ -57,22 +64,25 @@ fn try_set_param(ptr: *mut u8) -> Result<(), Box<dyn Error>> {
 }
 
 #[no_mangle]
-pub extern fn transform(ptr: *mut u8) -> *mut u8 {
-    match try_transform(ptr) {
+pub extern fn transform() -> *mut u8 {
+    match try_transform() {
         Ok(o) => match o {
             Some(result_json) => lens_sdk::to_mem(lens_sdk::JSON_TYPE_ID, &result_json),
             None => lens_sdk::nil_ptr(),
+            EndOfStream => lens_sdk::to_mem(lens_sdk::EOS_TYPE_ID, &[]),
         },
         Err(e) => lens_sdk::to_mem(lens_sdk::ERROR_TYPE_ID, &e.to_string().as_bytes())
     }
 }
 
-fn try_transform(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
+fn try_transform() -> Result<StreamOption<Vec<u8>>, Box<dyn Error>> {
+    let ptr = unsafe { next() };
     let mut input = match lens_sdk::try_from_mem::<HashMap<String, serde_json::Value>>(ptr)? {
         Some(v) => v,
         // Implementations of `transform` are free to handle nil however they like. In this
         // implementation we chose to return nil given a nil input.
         None => return Ok(None),
+        EndOfStream => return Ok(EndOfStream)
     };
 
     let params = PARAMETERS.read()?
@@ -88,5 +98,6 @@ fn try_transform(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
     result.insert(params.dst, value);
 
     let result_json = serde_json::to_vec(&result)?;
+    lens_sdk::free_transport_buffer(ptr)?;
     Ok(Some(result_json))
 }
diff --git a/tests/lenses/rust_wasm32_remove/Cargo.toml b/tests/lenses/rust_wasm32_remove/Cargo.toml
index c3b678d76e..a066d4d228 100644
--- a/tests/lenses/rust_wasm32_remove/Cargo.toml
+++ b/tests/lenses/rust_wasm32_remove/Cargo.toml
@@ -9,4 +9,4 @@ crate-type = ["cdylib"]
 [dependencies]
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0.87"
-lens_sdk = { version = "0.1.0", git = "https://github.com/lens-vm/lens.git" }
+lens_sdk = "^0.5"
diff --git a/tests/lenses/rust_wasm32_remove/src/lib.rs b/tests/lenses/rust_wasm32_remove/src/lib.rs
index 5bf2edb15f..249f0f07ed 100644
--- a/tests/lenses/rust_wasm32_remove/src/lib.rs
+++ b/tests/lenses/rust_wasm32_remove/src/lib.rs
@@ -3,6 +3,13 @@ use std::sync::RwLock;
 use std::error::Error;
 use std::{fmt, error};
 use serde::Deserialize;
+use lens_sdk::StreamOption;
+use lens_sdk::option::StreamOption::{Some, None, EndOfStream};
+
+#[link(wasm_import_module = "lens")]
+extern "C" {
+    fn next() -> *mut u8;
+}
 
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
 enum ModuleError {
@@ -24,7 +31,7 @@ pub struct Parameters {
     pub target: String,
 }
 
-static PARAMETERS: RwLock<Option<Parameters>> = RwLock::new(None);
+static PARAMETERS: RwLock<StreamOption<Parameters>> = RwLock::new(None);
 
 #[no_mangle]
 pub extern fn alloc(size: usize) -> *mut u8 {
@@ -49,22 +56,25 @@ fn try_set_param(ptr: *mut u8) -> Result<(), Box<dyn Error>> {
 }
 
 #[no_mangle]
-pub extern fn transform(ptr: *mut u8) -> *mut u8 {
-    match try_transform(ptr) {
+pub extern fn transform() -> *mut u8 {
+    match try_transform() {
         Ok(o) => match o {
             Some(result_json) => lens_sdk::to_mem(lens_sdk::JSON_TYPE_ID, &result_json),
             None => lens_sdk::nil_ptr(),
+            EndOfStream => lens_sdk::to_mem(lens_sdk::EOS_TYPE_ID, &[]),
         },
         Err(e) => lens_sdk::to_mem(lens_sdk::ERROR_TYPE_ID, &e.to_string().as_bytes())
     }
 }
 
-fn try_transform(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
+fn try_transform() -> Result<StreamOption<Vec<u8>>, Box<dyn Error>> {
+    let ptr = unsafe { next() };
     let mut input = match lens_sdk::try_from_mem::<HashMap<String, serde_json::Value>>(ptr)? {
         Some(v) => v,
         // Implementations of `transform` are free to handle nil however they like. In this
         // implementation we chose to return nil given a nil input.
         None => return Ok(None),
+        EndOfStream => return Ok(EndOfStream)
     };
 
     let params = PARAMETERS.read()?
@@ -75,5 +85,6 @@ fn try_transform(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
     input.remove(&params.target);
 
     let result_json = serde_json::to_vec(&input.clone())?;
+    lens_sdk::free_transport_buffer(ptr)?;
     Ok(Some(result_json))
 }
diff --git a/tests/lenses/rust_wasm32_set_default/Cargo.toml b/tests/lenses/rust_wasm32_set_default/Cargo.toml
index 14ddd3f992..34f77bd7a6 100644
--- a/tests/lenses/rust_wasm32_set_default/Cargo.toml
+++ b/tests/lenses/rust_wasm32_set_default/Cargo.toml
@@ -9,4 +9,4 @@ crate-type = ["cdylib"]
 [dependencies]
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0.87"
-lens_sdk = { version = "0.1.0", git = "https://github.com/lens-vm/lens.git" }
+lens_sdk = "^0.5"
diff --git a/tests/lenses/rust_wasm32_set_default/src/lib.rs b/tests/lenses/rust_wasm32_set_default/src/lib.rs
index 3bf433ffa1..e3003f7d73 100644
--- a/tests/lenses/rust_wasm32_set_default/src/lib.rs
+++ b/tests/lenses/rust_wasm32_set_default/src/lib.rs
@@ -3,6 +3,13 @@ use std::sync::RwLock;
 use std::error::Error;
 use std::{fmt, error};
 use serde::Deserialize;
+use lens_sdk::StreamOption;
+use lens_sdk::option::StreamOption::{Some, None, EndOfStream};
+
+#[link(wasm_import_module = "lens")]
+extern "C" {
+    fn next() -> *mut u8;
+}
 
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
 enum ModuleError {
@@ -25,7 +32,7 @@ pub struct Parameters {
     pub value: serde_json::Value,
 }
 
-static PARAMETERS: RwLock<Option<Parameters>> = RwLock::new(None);
+static PARAMETERS: RwLock<StreamOption<Parameters>> = RwLock::new(None);
 
 #[no_mangle]
 pub extern fn alloc(size: usize) -> *mut u8 {
@@ -50,22 +57,25 @@ fn try_set_param(ptr: *mut u8) -> Result<(), Box<dyn Error>> {
 }
 
 #[no_mangle]
-pub extern fn transform(ptr: *mut u8) -> *mut u8 {
-    match try_transform(ptr) {
+pub extern fn transform() -> *mut u8 {
+    match try_transform() {
         Ok(o) => match o {
             Some(result_json) => lens_sdk::to_mem(lens_sdk::JSON_TYPE_ID, &result_json),
             None => lens_sdk::nil_ptr(),
+            EndOfStream => lens_sdk::to_mem(lens_sdk::EOS_TYPE_ID, &[]),
         },
         Err(e) => lens_sdk::to_mem(lens_sdk::ERROR_TYPE_ID, &e.to_string().as_bytes())
     }
 }
 
-fn try_transform(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
+fn try_transform() -> Result<StreamOption<Vec<u8>>, Box<dyn Error>> {
+    let ptr = unsafe { next() };
     let mut input = match lens_sdk::try_from_mem::<HashMap<String, serde_json::Value>>(ptr)? {
         Some(v) => v,
         // Implementations of `transform` are free to handle nil however they like. In this
         // implementation we chose to return nil given a nil input.
         None => return Ok(None),
+        EndOfStream => return Ok(EndOfStream)
     };
 
     let params = PARAMETERS.read()?
@@ -76,26 +86,30 @@ fn try_transform(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
     input.insert(params.dst, params.value);
 
     let result_json = serde_json::to_vec(&input.clone())?;
+    lens_sdk::free_transport_buffer(ptr)?;
     Ok(Some(result_json))
 }
 
 #[no_mangle]
-pub extern fn inverse(ptr: *mut u8) -> *mut u8 {
-    match try_inverse(ptr) {
+pub extern fn inverse() -> *mut u8 {
+    match try_inverse() {
         Ok(o) => match o {
             Some(result_json) => lens_sdk::to_mem(lens_sdk::JSON_TYPE_ID, &result_json),
             None => lens_sdk::nil_ptr(),
+            EndOfStream => lens_sdk::to_mem(lens_sdk::EOS_TYPE_ID, &[]),
         },
         Err(e) => lens_sdk::to_mem(lens_sdk::ERROR_TYPE_ID, &e.to_string().as_bytes())
     }
 }
 
-fn try_inverse(ptr: *mut u8) -> Result<Option<Vec<u8>>, Box<dyn Error>> {
+fn try_inverse() -> Result<StreamOption<Vec<u8>>, Box<dyn Error>> {
+    let ptr = unsafe { next() };
     let mut input = match lens_sdk::try_from_mem::<HashMap<String, serde_json::Value>>(ptr)? {
         Some(v) => v,
         // Implementations of `transform` are free to handle nil however they like. In this
         // implementation we chose to return nil given a nil input.
None => return Ok(None), + EndOfStream => return Ok(EndOfStream) }; let params = PARAMETERS.read()? @@ -106,5 +120,6 @@ fn try_inverse(ptr: *mut u8) -> Result>, Box> { input.remove(¶ms.dst); let result_json = serde_json::to_vec(&input.clone())?; + lens_sdk::free_transport_buffer(ptr)?; Ok(Some(result_json)) } \ No newline at end of file From 4db8d82b6c3641ea8ed8adcf1e6ced43b7ba9213 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 18:57:00 -0500 Subject: [PATCH 08/60] bot: Update dependencies (bulk dependabot PRs) 27-11-2023 (#2081) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2079 bot: Bump @types/react-dom from 18.2.15 to 18.2.17 in /playground #2078 bot: Bump typescript from 5.2.2 to 5.3.2 in /playground #2076 bot: Bump @typescript-eslint/parser from 6.11.0 to 6.12.0 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #2077 bot: Bump @types/react from 18.2.37 to 18.2.38 in /playground #2075 bot: Bump vite from 4.5.0 to 5.0.2 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 616 +++++++++++++++++------------------ playground/package.json | 10 +- 2 files changed, 308 insertions(+), 318 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 6b67161990..c102e96425 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,17 +15,17 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.37", - "@types/react-dom": "^18.2.15", + "@types/react": "^18.2.38", + "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.1", "@typescript-eslint/eslint-plugin": "^6.12.0", - "@typescript-eslint/parser": "^6.11.0", + "@typescript-eslint/parser": "^6.12.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.54.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.4", - "typescript": "^5.2.2", - "vite": "^4.5.0" + "typescript": "^5.3.2", + "vite": "^5.0.2" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -112,9 +112,9 @@ "optional": true }, "node_modules/@esbuild/android-arm": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.18.17.tgz", - "integrity": "sha512-wHsmJG/dnL3OkpAcwbgoBTTMHVi4Uyou3F5mf58ZtmUyIKfcdA7TROav/6tCzET4A3QW2Q2FC+eFneMU+iyOxg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.8.tgz", + "integrity": "sha512-31E2lxlGM1KEfivQl8Yf5aYU/mflz9g06H6S15ITUFQueMFtFjESRMoDSkvMo8thYvLBax+VKTPlpnx+sPicOA==", "cpu": [ "arm" ], @@ -128,9 +128,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.18.17.tgz", - "integrity": "sha512-9np+YYdNDed5+Jgr1TdWBsozZ85U1Oa3xW0c7TWqH0y2aGghXtZsuT8nYRbzOMcl0bXZXjOGbksoTtVOlWrRZg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.8.tgz", + "integrity": "sha512-B8JbS61bEunhfx8kasogFENgQfr/dIp+ggYXwTqdbMAgGDhRa3AaPpQMuQU0rNxDLECj6FhDzk1cF9WHMVwrtA==", "cpu": [ "arm64" ], @@ -144,9 +144,9 @@ } 
}, "node_modules/@esbuild/android-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.18.17.tgz", - "integrity": "sha512-O+FeWB/+xya0aLg23hHEM2E3hbfwZzjqumKMSIqcHbNvDa+dza2D0yLuymRBQQnC34CWrsJUXyH2MG5VnLd6uw==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.8.tgz", + "integrity": "sha512-rdqqYfRIn4jWOp+lzQttYMa2Xar3OK9Yt2fhOhzFXqg0rVWEfSclJvZq5fZslnz6ypHvVf3CT7qyf0A5pM682A==", "cpu": [ "x64" ], @@ -160,9 +160,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.18.17.tgz", - "integrity": "sha512-M9uJ9VSB1oli2BE/dJs3zVr9kcCBBsE883prage1NWz6pBS++1oNn/7soPNS3+1DGj0FrkSvnED4Bmlu1VAE9g==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.8.tgz", + "integrity": "sha512-RQw9DemMbIq35Bprbboyf8SmOr4UXsRVxJ97LgB55VKKeJOOdvsIPy0nFyF2l8U+h4PtBx/1kRf0BelOYCiQcw==", "cpu": [ "arm64" ], @@ -176,9 +176,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.18.17.tgz", - "integrity": "sha512-XDre+J5YeIJDMfp3n0279DFNrGCXlxOuGsWIkRb1NThMZ0BsrWXoTg23Jer7fEXQ9Ye5QjrvXpxnhzl3bHtk0g==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.8.tgz", + "integrity": "sha512-3sur80OT9YdeZwIVgERAysAbwncom7b4bCI2XKLjMfPymTud7e/oY4y+ci1XVp5TfQp/bppn7xLw1n/oSQY3/Q==", "cpu": [ "x64" ], @@ -192,9 +192,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.17.tgz", - "integrity": "sha512-cjTzGa3QlNfERa0+ptykyxs5A6FEUQQF0MuilYXYBGdBxD3vxJcKnzDlhDCa1VAJCmAxed6mYhA2KaJIbtiNuQ==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.8.tgz", + "integrity": "sha512-WAnPJSDattvS/XtPCTj1tPoTxERjcTpH6HsMr6ujTT+X6rylVe8ggxk8pVxzf5U1wh5sPODpawNicF5ta/9Tmw==", "cpu": [ "arm64" ], @@ -208,9 +208,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.18.17.tgz", - "integrity": "sha512-sOxEvR8d7V7Kw8QqzxWc7bFfnWnGdaFBut1dRUYtu+EIRXefBc/eIsiUiShnW0hM3FmQ5Zf27suDuHsKgZ5QrA==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.8.tgz", + "integrity": "sha512-ICvZyOplIjmmhjd6mxi+zxSdpPTKFfyPPQMQTK/w+8eNK6WV01AjIztJALDtwNNfFhfZLux0tZLC+U9nSyA5Zg==", "cpu": [ "x64" ], @@ -224,9 +224,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.18.17.tgz", - "integrity": "sha512-2d3Lw6wkwgSLC2fIvXKoMNGVaeY8qdN0IC3rfuVxJp89CRfA3e3VqWifGDfuakPmp90+ZirmTfye1n4ncjv2lg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.8.tgz", + "integrity": "sha512-H4vmI5PYqSvosPaTJuEppU9oz1dq2A7Mr2vyg5TF9Ga+3+MGgBdGzcyBP7qK9MrwFQZlvNyJrvz6GuCaj3OukQ==", "cpu": [ "arm" ], @@ -240,9 +240,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.18.17.tgz", - "integrity": "sha512-c9w3tE7qA3CYWjT+M3BMbwMt+0JYOp3vCMKgVBrCl1nwjAlOMYzEo+gG7QaZ9AtqZFj5MbUc885wuBBmu6aADQ==", + "version": "0.19.8", + "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.8.tgz", + "integrity": "sha512-z1zMZivxDLHWnyGOctT9JP70h0beY54xDDDJt4VpTX+iwA77IFsE1vCXWmprajJGa+ZYSqkSbRQ4eyLCpCmiCQ==", "cpu": [ "arm64" ], @@ -256,9 +256,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.18.17.tgz", - "integrity": "sha512-1DS9F966pn5pPnqXYz16dQqWIB0dmDfAQZd6jSSpiT9eX1NzKh07J6VKR3AoXXXEk6CqZMojiVDSZi1SlmKVdg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.8.tgz", + "integrity": "sha512-1a8suQiFJmZz1khm/rDglOc8lavtzEMRo0v6WhPgxkrjcU0LkHj+TwBrALwoz/OtMExvsqbbMI0ChyelKabSvQ==", "cpu": [ "ia32" ], @@ -272,9 +272,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.18.17.tgz", - "integrity": "sha512-EvLsxCk6ZF0fpCB6w6eOI2Fc8KW5N6sHlIovNe8uOFObL2O+Mr0bflPHyHwLT6rwMg9r77WOAWb2FqCQrVnwFg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.8.tgz", + "integrity": "sha512-fHZWS2JJxnXt1uYJsDv9+b60WCc2RlvVAy1F76qOLtXRO+H4mjt3Tr6MJ5l7Q78X8KgCFudnTuiQRBhULUyBKQ==", "cpu": [ "loong64" ], @@ -288,9 +288,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.18.17.tgz", - "integrity": "sha512-e0bIdHA5p6l+lwqTE36NAW5hHtw2tNRmHlGBygZC14QObsA3bD4C6sXLJjvnDIjSKhW1/0S3eDy+QmX/uZWEYQ==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.8.tgz", + "integrity": "sha512-Wy/z0EL5qZYLX66dVnEg9riiwls5IYnziwuju2oUiuxVc+/edvqXa04qNtbrs0Ukatg5HEzqT94Zs7J207dN5Q==", "cpu": [ "mips64el" ], @@ -304,9 +304,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.18.17.tgz", - "integrity": "sha512-BAAilJ0M5O2uMxHYGjFKn4nJKF6fNCdP1E0o5t5fvMYYzeIqy2JdAP88Az5LHt9qBoUa4tDaRpfWt21ep5/WqQ==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.8.tgz", + "integrity": "sha512-ETaW6245wK23YIEufhMQ3HSeHO7NgsLx8gygBVldRHKhOlD1oNeNy/P67mIh1zPn2Hr2HLieQrt6tWrVwuqrxg==", "cpu": [ "ppc64" ], @@ -320,9 +320,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.18.17.tgz", - "integrity": "sha512-Wh/HW2MPnC3b8BqRSIme/9Zhab36PPH+3zam5pqGRH4pE+4xTrVLx2+XdGp6fVS3L2x+DrsIcsbMleex8fbE6g==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.8.tgz", + "integrity": "sha512-T2DRQk55SgoleTP+DtPlMrxi/5r9AeFgkhkZ/B0ap99zmxtxdOixOMI570VjdRCs9pE4Wdkz7JYrsPvsl7eESg==", "cpu": [ "riscv64" ], @@ -336,9 +336,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.18.17.tgz", - "integrity": "sha512-j/34jAl3ul3PNcK3pfI0NSlBANduT2UO5kZ7FCaK33XFv3chDhICLY8wJJWIhiQ+YNdQ9dxqQctRg2bvrMlYgg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.8.tgz", + "integrity": "sha512-NPxbdmmo3Bk7mbNeHmcCd7R7fptJaczPYBaELk6NcXxy7HLNyWwCyDJ/Xx+/YcNH7Im5dHdx9gZ5xIwyliQCbg==", "cpu": [ "s390x" ], @@ -352,9 +352,9 @@ } }, "node_modules/@esbuild/linux-x64": { - 
"version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.18.17.tgz", - "integrity": "sha512-QM50vJ/y+8I60qEmFxMoxIx4de03pGo2HwxdBeFd4nMh364X6TIBZ6VQ5UQmPbQWUVWHWws5MmJXlHAXvJEmpQ==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.8.tgz", + "integrity": "sha512-lytMAVOM3b1gPypL2TRmZ5rnXl7+6IIk8uB3eLsV1JwcizuolblXRrc5ShPrO9ls/b+RTp+E6gbsuLWHWi2zGg==", "cpu": [ "x64" ], @@ -368,9 +368,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.18.17.tgz", - "integrity": "sha512-/jGlhWR7Sj9JPZHzXyyMZ1RFMkNPjC6QIAan0sDOtIo2TYk3tZn5UDrkE0XgsTQCxWTTOcMPf9p6Rh2hXtl5TQ==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.8.tgz", + "integrity": "sha512-hvWVo2VsXz/8NVt1UhLzxwAfo5sioj92uo0bCfLibB0xlOmimU/DeAEsQILlBQvkhrGjamP0/el5HU76HAitGw==", "cpu": [ "x64" ], @@ -384,9 +384,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.18.17.tgz", - "integrity": "sha512-rSEeYaGgyGGf4qZM2NonMhMOP/5EHp4u9ehFiBrg7stH6BYEEjlkVREuDEcQ0LfIl53OXLxNbfuIj7mr5m29TA==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.8.tgz", + "integrity": "sha512-/7Y7u77rdvmGTxR83PgaSvSBJCC2L3Kb1M/+dmSIvRvQPXXCuC97QAwMugBNG0yGcbEGfFBH7ojPzAOxfGNkwQ==", "cpu": [ "x64" ], @@ -400,9 +400,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.18.17.tgz", - "integrity": "sha512-Y7ZBbkLqlSgn4+zot4KUNYst0bFoO68tRgI6mY2FIM+b7ZbyNVtNbDP5y8qlu4/knZZ73fgJDlXID+ohY5zt5g==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.8.tgz", + "integrity": "sha512-9Lc4s7Oi98GqFA4HzA/W2JHIYfnXbUYgekUP/Sm4BG9sfLjyv6GKKHKKVs83SMicBF2JwAX6A1PuOLMqpD001w==", "cpu": [ "x64" ], @@ -416,9 +416,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.18.17.tgz", - "integrity": "sha512-bwPmTJsEQcbZk26oYpc4c/8PvTY3J5/QK8jM19DVlEsAB41M39aWovWoHtNm78sd6ip6prilxeHosPADXtEJFw==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.8.tgz", + "integrity": "sha512-rq6WzBGjSzihI9deW3fC2Gqiak68+b7qo5/3kmB6Gvbh/NYPA0sJhrnp7wgV4bNwjqM+R2AApXGxMO7ZoGhIJg==", "cpu": [ "arm64" ], @@ -432,9 +432,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.18.17.tgz", - "integrity": "sha512-H/XaPtPKli2MhW+3CQueo6Ni3Avggi6hP/YvgkEe1aSaxw+AeO8MFjq8DlgfTd9Iz4Yih3QCZI6YLMoyccnPRg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.8.tgz", + "integrity": "sha512-AIAbverbg5jMvJznYiGhrd3sumfwWs8572mIJL5NQjJa06P8KfCPWZQ0NwZbPQnbQi9OWSZhFVSUWjjIrn4hSw==", "cpu": [ "ia32" ], @@ -448,9 +448,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.18.17.tgz", - "integrity": "sha512-fGEb8f2BSA3CW7riJVurug65ACLuQAzKq0SSqkY2b2yHHH0MzDfbLyKIGzHwOI/gkHcxM/leuSW6D5w/LMNitA==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.8.tgz", + "integrity": 
"sha512-bfZ0cQ1uZs2PqpulNL5j/3w+GDhP36k1K5c38QdQg+Swy51jFZWWeIkteNsufkQxp986wnqRRsb/bHbY1WQ7TA==", "cpu": [ "x64" ], @@ -1401,6 +1401,162 @@ "@babel/runtime": "^7.13.10" } }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.6.0.tgz", + "integrity": "sha512-keHkkWAe7OtdALGoutLY3utvthkGF+Y17ws9LYT8pxMBYXaCoH/8dXS2uzo6e8+sEhY7y/zi5RFo22Dy2lFpDw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.6.0.tgz", + "integrity": "sha512-y3Kt+34smKQNWilicPbBz/MXEY7QwDzMFNgwEWeYiOhUt9MTWKjHqe3EVkXwT2fR7izOvHpDWZ0o2IyD9SWX7A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.6.0.tgz", + "integrity": "sha512-oLzzxcUIHltHxOCmaXl+pkIlU+uhSxef5HfntW7RsLh1eHm+vJzjD9Oo4oUKso4YuP4PpbFJNlZjJuOrxo8dPg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.6.0.tgz", + "integrity": "sha512-+ANnmjkcOBaV25n0+M0Bere3roeVAnwlKW65qagtuAfIxXF9YxUneRyAn/RDcIdRa7QrjRNJL3jR7T43ObGe8Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.6.0.tgz", + "integrity": "sha512-tBTSIkjSVUyrekddpkAqKOosnj1Fc0ZY0rJL2bIEWPKqlEQk0paORL9pUIlt7lcGJi3LzMIlUGXvtNi1Z6MOCQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.6.0.tgz", + "integrity": "sha512-Ed8uJI3kM11de9S0j67wAV07JUNhbAqIrDYhQBrQW42jGopgheyk/cdcshgGO4fW5Wjq97COCY/BHogdGvKVNQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.6.0.tgz", + "integrity": "sha512-mZoNQ/qK4D7SSY8v6kEsAAyDgznzLLuSFCA3aBHZTmf3HP/dW4tNLTtWh9+LfyO0Z1aUn+ecpT7IQ3WtIg3ViQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.6.0.tgz", + "integrity": "sha512-rouezFHpwCqdEXsqAfNsTgSWO0FoZ5hKv5p+TGO5KFhyN/dvYXNMqMolOb8BkyKcPqjYRBeT+Z6V3aM26rPaYg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.6.0.tgz", + "integrity": "sha512-Bbm+fyn3S6u51urfj3YnqBXg5vI2jQPncRRELaucmhBVyZkbWClQ1fEsRmdnCPpQOQfkpg9gZArvtMVkOMsh1w==", + "cpu": 
[ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.6.0.tgz", + "integrity": "sha512-+MRMcyx9L2kTrTUzYmR61+XVsliMG4odFb5UmqtiT8xOfEicfYAGEuF/D1Pww1+uZkYhBqAHpvju7VN+GnC3ng==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.6.0.tgz", + "integrity": "sha512-rxfeE6K6s/Xl2HGeK6cO8SiQq3k/3BYpw7cfhW5Bk2euXNEpuzi2cc7llxx1si1QgwfjNtdRNTGqdBzGlFZGFw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.6.0.tgz", + "integrity": "sha512-QqmCsydHS172Y0Kc13bkMXvipbJSvzeglBncJG3LsYJSiPlxYACz7MmJBs4A8l1oU+jfhYEIC/+AUSlvjmiX/g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, "node_modules/@swagger-api/apidom-ast": { "version": "0.83.0", "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.83.0.tgz", @@ -2081,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.37", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.37.tgz", - "integrity": "sha512-RGAYMi2bhRgEXT3f4B92WTohopH6bIXw05FuGlmJEnv/omEn190+QYEIYxIAuIBdKgboYYdVved2p1AxZVQnaw==", + "version": "18.2.38", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.38.tgz", + "integrity": "sha512-cBBXHzuPtQK6wNthuVMV6IjHAFkdl/FOPFIlkd81/Cd1+IqkHu/A+w4g43kaQQoYHik/ruaQBDL72HyCy1vuMw==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -2091,9 +2247,9 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.15", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.15.tgz", - "integrity": "sha512-HWMdW+7r7MR5+PZqJF6YFNSCtjz1T0dsvo/f1BV6HkV+6erD/nA7wd9NM00KVG83zf2nJ7uATPO9ttdIPvi3gg==", + "version": "18.2.17", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.17.tgz", + "integrity": "sha512-rvrT/M7Df5eykWFxn6MYt5Pem/Dbyc1N8Y0S9Mrkw2WFCRiqUgw9P7ul2NpwsXCSM1DVdENzdG9J5SreqfAIWg==", "devOptional": true, "dependencies": { "@types/react": "*" @@ -2172,63 +2328,16 @@ } } }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", - "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.12.0", - "@typescript-eslint/visitor-keys": "6.12.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", - "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, 
- "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "node_modules/@typescript-eslint/parser": { "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", - "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.12.0.tgz", + "integrity": "sha512-s8/jNFPKPNRmXEnNXfuo1gemBdVmpQsK1pcu+QIvuNJuhFzGrpD7WjOcvDc/+uEdfzSYpNu7U/+MmbScjoQ6vg==", "dev": true, "dependencies": { + "@typescript-eslint/scope-manager": "6.12.0", "@typescript-eslint/types": "6.12.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.11.0.tgz", - "integrity": "sha512-+whEdjk+d5do5nxfxx73oanLL9ghKO3EwM9kBCkUtWMRwWuPaFv9ScuqlYfQ6pAD6ZiJhky7TZ2ZYhrMsfMxVQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/scope-manager": "6.11.0", - "@typescript-eslint/types": "6.11.0", - "@typescript-eslint/typescript-estree": "6.11.0", - "@typescript-eslint/visitor-keys": "6.11.0", + "@typescript-eslint/typescript-estree": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0", "debug": "^4.3.4" }, "engines": { @@ -2248,13 +2357,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.11.0.tgz", - "integrity": "sha512-0A8KoVvIURG4uhxAdjSaxy8RdRE//HztaZdG8KiHLP8WOXSk0vlF7Pvogv+vlJA5Rnjj/wDcFENvDaHb+gKd1A==", + "version": "6.12.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", + "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.11.0", - "@typescript-eslint/visitor-keys": "6.11.0" + "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/visitor-keys": "6.12.0" }, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2291,7 +2400,7 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "node_modules/@typescript-eslint/types": { "version": "6.12.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", @@ -2304,7 +2413,7 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "node_modules/@typescript-eslint/typescript-estree": { "version": "6.12.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", @@ -2331,63 +2440,6 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.12.0", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", - "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.12.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.11.0.tgz", - "integrity": "sha512-ZbEzuD4DwEJxwPqhv3QULlRj8KYTAnNsXxmfuUXFCxZmO6CF2gM/y+ugBSAQhrqaJL3M+oe4owdWunaHM6beqA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.11.0.tgz", - "integrity": "sha512-Aezzv1o2tWJwvZhedzvD5Yv7+Lpu1by/U1LZ5gLc4tCx8jUmuSCMioPFRjliN/6SJIvY6HpTtJIWubKuYYYesQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.11.0", - "@typescript-eslint/visitor-keys": "6.11.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, "node_modules/@typescript-eslint/utils": { "version": "6.12.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.12.0.tgz", @@ -2413,64 +2465,7 @@ "eslint": "^7.0.0 || ^8.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", - "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.12.0", - "@typescript-eslint/visitor-keys": "6.12.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", - "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", - "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.12.0", - "@typescript-eslint/visitor-keys": "6.12.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - 
"semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "node_modules/@typescript-eslint/visitor-keys": { "version": "6.12.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", @@ -2487,23 +2482,6 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.11.0.tgz", - "integrity": "sha512-+SUN/W7WjBr05uRxPggJPSzyB8zUpaYo2hByKasWbqr3PM8AXfZt8UHdNpBS1v9SA62qnSSMF3380SwDqqprgQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.11.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@ungap/structured-clone": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", @@ -3085,9 +3063,9 @@ } }, "node_modules/esbuild": { - "version": "0.18.17", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.18.17.tgz", - "integrity": "sha512-1GJtYnUxsJreHYA0Y+iQz2UEykonY66HNWOb0yXYZi9/kNrORUEHVg87eQsCtqh59PEJ5YVZJO98JHznMJSWjg==", + "version": "0.19.8", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.8.tgz", + "integrity": "sha512-l7iffQpT2OrZfH2rXIp7/FkmaeZM0vxbxN9KfiCwGYuZqzMg/JdvX26R31Zxn/Pxvsrg3Y9N6XTcnknqDyyv4w==", "dev": true, "hasInstallScript": true, "bin": { @@ -3097,28 +3075,28 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/android-arm": "0.18.17", - "@esbuild/android-arm64": "0.18.17", - "@esbuild/android-x64": "0.18.17", - "@esbuild/darwin-arm64": "0.18.17", - "@esbuild/darwin-x64": "0.18.17", - "@esbuild/freebsd-arm64": "0.18.17", - "@esbuild/freebsd-x64": "0.18.17", - "@esbuild/linux-arm": "0.18.17", - "@esbuild/linux-arm64": "0.18.17", - "@esbuild/linux-ia32": "0.18.17", - "@esbuild/linux-loong64": "0.18.17", - "@esbuild/linux-mips64el": "0.18.17", - "@esbuild/linux-ppc64": "0.18.17", - "@esbuild/linux-riscv64": "0.18.17", - "@esbuild/linux-s390x": "0.18.17", - "@esbuild/linux-x64": "0.18.17", - "@esbuild/netbsd-x64": "0.18.17", - "@esbuild/openbsd-x64": "0.18.17", - "@esbuild/sunos-x64": "0.18.17", - "@esbuild/win32-arm64": "0.18.17", - "@esbuild/win32-ia32": "0.18.17", - "@esbuild/win32-x64": "0.18.17" + "@esbuild/android-arm": "0.19.8", + "@esbuild/android-arm64": "0.19.8", + "@esbuild/android-x64": "0.19.8", + "@esbuild/darwin-arm64": "0.19.8", + "@esbuild/darwin-x64": "0.19.8", + "@esbuild/freebsd-arm64": "0.19.8", + "@esbuild/freebsd-x64": "0.19.8", + "@esbuild/linux-arm": "0.19.8", + "@esbuild/linux-arm64": "0.19.8", + "@esbuild/linux-ia32": "0.19.8", + "@esbuild/linux-loong64": "0.19.8", + "@esbuild/linux-mips64el": "0.19.8", + "@esbuild/linux-ppc64": "0.19.8", + "@esbuild/linux-riscv64": "0.19.8", + "@esbuild/linux-s390x": "0.19.8", + "@esbuild/linux-x64": "0.19.8", + "@esbuild/netbsd-x64": "0.19.8", + "@esbuild/openbsd-x64": "0.19.8", + "@esbuild/sunos-x64": 
"0.19.8", + "@esbuild/win32-arm64": "0.19.8", + "@esbuild/win32-ia32": "0.19.8", + "@esbuild/win32-x64": "0.19.8" } }, "node_modules/escape-string-regexp": { @@ -3555,9 +3533,9 @@ "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", "dev": true, "hasInstallScript": true, "optional": true, @@ -5245,18 +5223,30 @@ } }, "node_modules/rollup": { - "version": "3.27.2", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-3.27.2.tgz", - "integrity": "sha512-YGwmHf7h2oUHkVBT248x0yt6vZkYQ3/rvE5iQuVBh3WO8GcJ6BNeOkpoX1yMHIiBm18EMLjBPIoUDkhgnyxGOQ==", + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.6.0.tgz", + "integrity": "sha512-R8i5Her4oO1LiMQ3jKf7MUglYV/mhQ5g5OKeld5CnkmPdIGo79FDDQYqPhq/PCVuTQVuxsWgIbDy9F+zdHn80w==", "dev": true, "bin": { "rollup": "dist/bin/rollup" }, "engines": { - "node": ">=14.18.0", + "node": ">=18.0.0", "npm": ">=8.0.0" }, "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.6.0", + "@rollup/rollup-android-arm64": "4.6.0", + "@rollup/rollup-darwin-arm64": "4.6.0", + "@rollup/rollup-darwin-x64": "4.6.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.6.0", + "@rollup/rollup-linux-arm64-gnu": "4.6.0", + "@rollup/rollup-linux-arm64-musl": "4.6.0", + "@rollup/rollup-linux-x64-gnu": "4.6.0", + "@rollup/rollup-linux-x64-musl": "4.6.0", + "@rollup/rollup-win32-arm64-msvc": "4.6.0", + "@rollup/rollup-win32-ia32-msvc": "4.6.0", + "@rollup/rollup-win32-x64-msvc": "4.6.0", "fsevents": "~2.3.2" } }, @@ -5805,9 +5795,9 @@ } }, "node_modules/typescript": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.2.2.tgz", - "integrity": "sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==", + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.2.tgz", + "integrity": "sha512-6l+RyNy7oAHDfxC4FzSJcz9vnjTKxrLpDG5M2Vu4SHRVNg6xzqZp6LYSR9zjqQTu8DU/f5xwxUdADOkbrIX2gQ==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -5920,29 +5910,29 @@ "optional": true }, "node_modules/vite": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.0.tgz", - "integrity": "sha512-ulr8rNLA6rkyFAlVWw2q5YJ91v098AFQ2R0PRFwPzREXOUJQPtFUG0t+/ZikhaOCDqFoDhN6/v8Sq0o4araFAw==", + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.2.tgz", + "integrity": "sha512-6CCq1CAJCNM1ya2ZZA7+jS2KgnhbzvxakmlIjN24cF/PXhRMzpM/z8QgsVJA/Dm5fWUWnVEsmtBoMhmerPxT0g==", "dev": true, "dependencies": { - "esbuild": "^0.18.10", - "postcss": "^8.4.27", - "rollup": "^3.27.1" + "esbuild": "^0.19.3", + "postcss": "^8.4.31", + "rollup": "^4.2.0" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^14.18.0 || >=16.0.0" + "node": "^18.0.0 || >=20.0.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" }, "optionalDependencies": { - "fsevents": "~2.3.2" + "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": ">= 14", + "@types/node": "^18.0.0 || >=20.0.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", diff 
--git a/playground/package.json b/playground/package.json index e69479faf4..d7c87d45f5 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,16 +17,16 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.37", - "@types/react-dom": "^18.2.15", + "@types/react": "^18.2.38", + "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.1", "@typescript-eslint/eslint-plugin": "^6.12.0", - "@typescript-eslint/parser": "^6.11.0", + "@typescript-eslint/parser": "^6.12.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.54.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.4", - "typescript": "^5.2.2", - "vite": "^4.5.0" + "typescript": "^5.3.2", + "vite": "^5.0.2" } } From 437dd3b892296253a51e5caf45bd3f90421d8a4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Nov 2023 04:11:45 -0500 Subject: [PATCH 09/60] bot: Bump @typescript-eslint/parser from 6.12.0 to 6.13.0 in /playground (#2085) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 6.12.0 to 6.13.0.
Release notes

Sourced from @typescript-eslint/parser's releases.

v6.13.0

6.13.0 (2023-11-27)

Note for eslint plugin authors and other API consumers

TypeScript v5.3 shipped with a type change that was incompatible with our types. This change has been fixed and will be released in v5.3.3. Until that time, you can work around this error using `skipLibCheck`.

Bug Fixes

  • typescript-estree: ensure backwards compat with pre-5.3 import attributes (#7967) (810fc8c)

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/parser's changelog.

6.13.0 (2023-11-27)

Note: Version bump only for package @typescript-eslint/parser

You can read about our versioning strategy and releases on our website.

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=6.12.0&new-version=6.13.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 90 ++++++++++++++++++++++++++++++++---- playground/package.json | 2 +- 2 files changed, 83 insertions(+), 9 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index c102e96425..882b0ac5d7 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.1", "@typescript-eslint/eslint-plugin": "^6.12.0", - "@typescript-eslint/parser": "^6.12.0", + "@typescript-eslint/parser": "^6.13.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.54.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2329,15 +2329,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.12.0.tgz", - "integrity": "sha512-s8/jNFPKPNRmXEnNXfuo1gemBdVmpQsK1pcu+QIvuNJuhFzGrpD7WjOcvDc/+uEdfzSYpNu7U/+MmbScjoQ6vg==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.13.0.tgz", + "integrity": "sha512-VpG+M7GNhHLI/aTDctqAV0XbzB16vf+qDX9DXuMZSe/0bahzDA9AKZB15NDbd+D9M4cDsJvfkbGOA7qiZ/bWJw==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.12.0", - "@typescript-eslint/types": "6.12.0", - "@typescript-eslint/typescript-estree": "6.12.0", - "@typescript-eslint/visitor-keys": "6.12.0", + "@typescript-eslint/scope-manager": "6.13.0", + "@typescript-eslint/types": "6.13.0", + "@typescript-eslint/typescript-estree": "6.13.0", + "@typescript-eslint/visitor-keys": "6.13.0", "debug": "^4.3.4" }, "engines": { @@ -2356,6 +2356,80 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.0.tgz", + "integrity": "sha512-2x0K2/CujsokIv+LN2T0l5FVDMtsCjkUyYtlcY4xxnxLAW+x41LXr16duoicHpGtLhmtN7kqvuFJ3zbz00Ikhw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.13.0", + "@typescript-eslint/visitor-keys": "6.13.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.13.0.tgz", + "integrity": "sha512-oXg7DFxx/GmTrKXKKLSoR2rwiutOC7jCQ5nDH5p5VS6cmHE1TcPTaYQ0VPSSUvj7BnNqCgQ/NXcTBxn59pfPTQ==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.0.tgz", + "integrity": "sha512-IT4O/YKJDoiy/mPEDsfOfp+473A9GVqXlBKckfrAOuVbTqM8xbc0LuqyFCcgeFWpqu3WjQexolgqN2CuWBYbog==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.13.0", + "@typescript-eslint/visitor-keys": "6.13.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.0.tgz", + "integrity": "sha512-UQklteCEMCRoq/1UhKFZsHv5E4dN1wQSzJoxTfABasWk1HgJRdg1xNUve/Kv/Sdymt4x+iEzpESOqRFlQr/9Aw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.13.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "6.12.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", diff --git a/playground/package.json b/playground/package.json index d7c87d45f5..61f47c5dfe 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.1", "@typescript-eslint/eslint-plugin": "^6.12.0", - "@typescript-eslint/parser": "^6.12.0", + "@typescript-eslint/parser": "^6.13.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.54.0", "eslint-plugin-react-hooks": "^4.6.0", From c721bcbc561b6e9996d9f30ca6e9a54176844720 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 Nov 2023 05:17:08 -0800 Subject: [PATCH 10/60] bot: Bump @types/react from 18.2.38 to 18.2.39 in /playground (#2086) Bumps [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) from 18.2.38 to 18.2.39.
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@types/react&package-manager=npm_and_yarn&previous-version=18.2.38&new-version=18.2.39)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 882b0ac5d7..71b67a4c40 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,7 +15,7 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.38", + "@types/react": "^18.2.39", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.1", "@typescript-eslint/eslint-plugin": "^6.12.0", @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.38", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.38.tgz", - "integrity": "sha512-cBBXHzuPtQK6wNthuVMV6IjHAFkdl/FOPFIlkd81/Cd1+IqkHu/A+w4g43kaQQoYHik/ruaQBDL72HyCy1vuMw==", + "version": "18.2.39", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.39.tgz", + "integrity": "sha512-Oiw+ppED6IremMInLV4HXGbfbG6GyziY3kqAwJYOR0PNbkYDmLWQA3a95EhdSmamsvbkJN96ZNN+YD+fGjzSBA==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", diff --git a/playground/package.json b/playground/package.json index 61f47c5dfe..1b406565ce 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,7 +17,7 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.38", + "@types/react": "^18.2.39", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.1", "@typescript-eslint/eslint-plugin": "^6.12.0", From 16b66ea2adf15be7cbbf68b4321cb41b7d7411c3 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Tue, 28 Nov 2023 10:23:04 -0500 Subject: [PATCH 11/60] ci(i): Upload code coverage even if some matrix jobs fail (#2089) ## Relevant issue(s) Resolves #2088 ## Description - Enable uploading whatever coverage reports we have (even if a matrix job failed). - Don't upload coverage reports if manually canceled the workflow/jobs. - Fix coverage reports that were overwritten because of OS name clashing (was uploading only 18 artifacts instead of 20). - Increase retention period to 7 days, for the coverage reports. For example: Currently windows build is flaky, and on a windows build failure it doesn't upload any code coverage reports to codecov. Before this PR the coverage upload job skips: https://github.com/sourcenetwork/defradb/actions/runs/7016532008/job/19088626645 ## Testing ### One Fail so 19 artifacts + Upload successful: Here is a test run to see this working in action: https://github.com/sourcenetwork/defradb/actions/runs/7018970859/attempts/1?pr=2089pr%3D2089 Note: even with 1 failure the build still uploads the report and the total artifact count is 19 (-1 due to windows failure). 
### Passed with 20 artifacts + Upload successful: https://github.com/sourcenetwork/defradb/actions/runs/7018970859 --- .github/workflows/test-and-upload-coverage.yml | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-and-upload-coverage.yml b/.github/workflows/test-and-upload-coverage.yml index d92ae01cce..de7c9df848 100644 --- a/.github/workflows/test-and-upload-coverage.yml +++ b/.github/workflows/test-and-upload-coverage.yml @@ -95,18 +95,23 @@ jobs: if: ${{ !matrix.detect-changes }} uses: actions/upload-artifact@v3 with: - name: ${{ matrix.client-type }}_${{ matrix.database-type }}_${{ matrix.mutation-type }} + name: ${{ matrix.os }}_${{ matrix.client-type }}_${{ matrix.database-type }}_${{ matrix.mutation-type }} path: coverage.txt if-no-files-found: error - retention-days: 1 + retention-days: 7 upload-coverage: name: Upload test code coverage job - runs-on: ubuntu-latest - needs: run-tests + # Important to know: + # - We didn't use `if: always()` here, so this job doesn't run if we manually canceled. + # - `if: success()` is always implied unless `always()` or `failure()` is specified. + if: success() || failure() + + runs-on: ubuntu-latest + steps: - name: Checkout code into the directory uses: actions/checkout@v3 From 89445ad1688c54cbb8b0ec3524f6d612d9ec70a8 Mon Sep 17 00:00:00 2001 From: vuittont60 <81072379+vuittont60@users.noreply.github.com> Date: Tue, 5 Dec 2023 06:43:42 +0800 Subject: [PATCH 12/60] docs(i): Fix typos (#2094) ## Relevant issue(s) Resolves #2093 ## Description Fix some typos --- client/db.go | 6 +++--- client/lens.go | 6 +++--- docs/data_format_changes/i412-no-change-tests-updated.md | 2 +- .../query/simple/with_filter/with_like_string_test.go | 2 +- .../query/simple/with_filter/with_nlike_string_test.go | 2 +- .../schema/updates/add/field/create_update_test.go | 2 +- 6 files changed, 10 insertions(+), 10 deletions(-) diff --git a/client/db.go b/client/db.go index b1b63f29d6..59fb1ddc18 100644 --- a/client/db.go +++ b/client/db.go @@ -36,7 +36,7 @@ type DB interface { // NewConcurrentTxn returns a new transaction on the root store that may be managed externally. // - // It may be used with other functions in the client package. It is threadsafe and mutliple threads/Go routines + // It may be used with other functions in the client package. It is threadsafe and multiple threads/Go routines // can safely operate on it concurrently. NewConcurrentTxn(context.Context, bool) (datastore.Txn, error) @@ -123,14 +123,14 @@ type Store interface { // It will return an error if the provided schema version ID does not exist. SetDefaultSchemaVersion(context.Context, string) error - // SetMigration sets the migration for the given source-destination schema version IDs. Is equivilent to + // SetMigration sets the migration for the given source-destination schema version IDs. Is equivalent to // calling `LensRegistry().SetMigration(ctx, cfg)`. // // There may only be one migration per schema version id. If another migration was registered it will be // overwritten by this migration. // // Neither of the schema version IDs specified in the configuration need to exist at the time of calling. - // This is to allow the migration of documents of schema versions unknown to the local node recieved by the + // This is to allow the migration of documents of schema versions unknown to the local node received by the // P2P system. 
// // Migrations will only run if there is a complete path from the document schema version to the latest local diff --git a/client/lens.go b/client/lens.go index 7b1264275f..35ef9f1ee3 100644 --- a/client/lens.go +++ b/client/lens.go @@ -45,18 +45,18 @@ type LensConfig struct { type LensRegistry interface { // WithTxn returns a new LensRegistry scoped to the given transaction. // - // WARNING: Currently this does not provide snapshot isolation, if other transactions are commited + // WARNING: Currently this does not provide snapshot isolation, if other transactions are committed // after this has been created, the results of those commits will be visible within this scope. WithTxn(datastore.Txn) LensRegistry - // SetMigration sets the migration for the given source-destination schema version IDs. Is equivilent to + // SetMigration sets the migration for the given source-destination schema version IDs. Is equivalent to // calling `Store.SetMigration(ctx, cfg)`. // // There may only be one migration per schema version id. If another migration was registered it will be // overwritten by this migration. // // Neither of the schema version IDs specified in the configuration need to exist at the time of calling. - // This is to allow the migration of documents of schema versions unknown to the local node recieved by the + // This is to allow the migration of documents of schema versions unknown to the local node received by the // P2P system. // // Migrations will only run if there is a complete path from the document schema version to the latest local diff --git a/docs/data_format_changes/i412-no-change-tests-updated.md b/docs/data_format_changes/i412-no-change-tests-updated.md index 4146a4eeb7..3421303969 100644 --- a/docs/data_format_changes/i412-no-change-tests-updated.md +++ b/docs/data_format_changes/i412-no-change-tests-updated.md @@ -1,3 +1,3 @@ # Add tests for default properties -This is is not a breaking change, but the test schema was updated which resulted in a few false-failures in the change-detector tool. Not relevent to defra users. +This is is not a breaking change, but the test schema was updated which resulted in a few false-failures in the change-detector tool. Not relevant to defra users. 
diff --git a/tests/integration/query/simple/with_filter/with_like_string_test.go b/tests/integration/query/simple/with_filter/with_like_string_test.go index e38f58f212..00e53aed82 100644 --- a/tests/integration/query/simple/with_filter/with_like_string_test.go +++ b/tests/integration/query/simple/with_filter/with_like_string_test.go @@ -138,7 +138,7 @@ func TestQuerySimpleWithLikeStringContainsFilterBlockExactString(t *testing.T) { func TestQuerySimpleWithLikeStringContainsFilterBlockContainsStringMuplitpleResults(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with basic like-string filter with contains string mulitple results", + Description: "Simple query with basic like-string filter with contains string multiple results", Request: `query { Users(filter: {Name: {_like: "%Targaryen%"}}) { Name diff --git a/tests/integration/query/simple/with_filter/with_nlike_string_test.go b/tests/integration/query/simple/with_filter/with_nlike_string_test.go index 39f7d7e386..e1e825abd2 100644 --- a/tests/integration/query/simple/with_filter/with_nlike_string_test.go +++ b/tests/integration/query/simple/with_filter/with_nlike_string_test.go @@ -138,7 +138,7 @@ func TestQuerySimpleWithNotLikeStringContainsFilterBlockExactString(t *testing.T func TestQuerySimpleWithNotLikeStringContainsFilterBlockContainsStringMuplitpleResults(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with basic not like-string filter with contains string mulitple results", + Description: "Simple query with basic not like-string filter with contains string multiple results", Request: `query { Users(filter: {Name: {_nlike: "%Targaryen%"}}) { Name diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index 0228c205ed..7cf8af8480 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -21,7 +21,7 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi updatedSchemaVersionId := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" test := testUtils.TestCase{ - Description: "Test schema update, add field with update after schema update, verison join", + Description: "Test schema update, add field with update after schema update, version join", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` From 5003d5afcd29a05b0b01e3b3251dc546cc77d8d6 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 19:41:04 -0800 Subject: [PATCH 13/60] bot: Update dependencies (bulk dependabot PRs) 04-12-2023 (#2107) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2106 bot: Bump @types/swagger-ui-react from 4.18.1 to 4.18.3 in /playground #2104 bot: Bump eslint-plugin-react-refresh from 0.4.4 to 0.4.5 in /playground #2103 bot: Bump eslint from 8.54.0 to 8.55.0 in /playground #2102 bot: Bump @types/react from 18.2.39 to 18.2.41 in /playground #2101 bot: Bump golang.org/x/crypto from 0.15.0 to 0.16.0 #2100 bot: Bump golang.org/x/net from 0.18.0 to 0.19.0 #2099 bot: Bump github.com/ugorji/go/codec from 1.2.11 to 1.2.12 #2098 bot: Bump github.com/libp2p/go-libp2p-kad-dht from 0.25.1 to 0.25.2 ⚠️ The following PR had conflicts and was resolved / merged manually: #2105 bot: Bump 
@typescript-eslint/eslint-plugin from 6.12.0 to 6.13.1 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 10 +-- go.sum | 20 +++--- playground/package-lock.json | 120 +++++++++++++++++------------------ playground/package.json | 10 +-- 4 files changed, 80 insertions(+), 80 deletions(-) diff --git a/go.mod b/go.mod index 523cf9a22c..97a7332080 100644 --- a/go.mod +++ b/go.mod @@ -24,7 +24,7 @@ require ( github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c github.com/libp2p/go-libp2p v0.32.1 github.com/libp2p/go-libp2p-gostream v0.6.0 - github.com/libp2p/go-libp2p-kad-dht v0.25.1 + github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/libp2p/go-libp2p-pubsub v0.10.0 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mitchellh/mapstructure v1.5.0 @@ -40,15 +40,15 @@ require ( github.com/spf13/viper v1.17.0 github.com/stretchr/testify v1.8.4 github.com/tidwall/btree v1.7.0 - github.com/ugorji/go/codec v1.2.11 + github.com/ugorji/go/codec v1.2.12 github.com/valyala/fastjson v1.6.4 github.com/vito/go-sse v1.0.0 go.opentelemetry.io/otel/metric v1.21.0 go.opentelemetry.io/otel/sdk/metric v1.21.0 go.uber.org/zap v1.26.0 - golang.org/x/crypto v0.15.0 + golang.org/x/crypto v0.16.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa - golang.org/x/net v0.18.0 + golang.org/x/net v0.19.0 google.golang.org/grpc v1.59.0 google.golang.org/protobuf v1.31.0 ) @@ -182,7 +182,7 @@ require ( go.uber.org/multierr v1.11.0 // indirect golang.org/x/mod v0.14.0 // indirect golang.org/x/sync v0.5.0 // indirect - golang.org/x/sys v0.14.0 // indirect + golang.org/x/sys v0.15.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.15.0 // indirect gonum.org/v1/gonum v0.13.0 // indirect diff --git a/go.sum b/go.sum index 49e011e0ff..f5d83b2f7e 100644 --- a/go.sum +++ b/go.sum @@ -376,8 +376,8 @@ github.com/libp2p/go-libp2p-asn-util v0.3.0 h1:gMDcMyYiZKkocGXDQ5nsUQyquC9+H+iLE github.com/libp2p/go-libp2p-asn-util v0.3.0/go.mod h1:B1mcOrKUE35Xq/ASTmQ4tN3LNzVVaMNmq2NACuqyB9w= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA= -github.com/libp2p/go-libp2p-kad-dht v0.25.1 h1:ofFNrf6MMEy4vi3R1VbJ7LOcTn3Csh0cDcaWHTxtWNA= -github.com/libp2p/go-libp2p-kad-dht v0.25.1/go.mod h1:6za56ncRHYXX4Nc2vn8z7CZK0P4QiMcrn77acKLM2Oo= +github.com/libp2p/go-libp2p-kad-dht v0.25.2 h1:FOIk9gHoe4YRWXTu8SY9Z1d0RILol0TrtApsMDPjAVQ= +github.com/libp2p/go-libp2p-kad-dht v0.25.2/go.mod h1:6za56ncRHYXX4Nc2vn8z7CZK0P4QiMcrn77acKLM2Oo= github.com/libp2p/go-libp2p-kbucket v0.6.3 h1:p507271wWzpy2f1XxPzCQG9NiN6R6lHL9GiSErbQQo0= github.com/libp2p/go-libp2p-kbucket v0.6.3/go.mod h1:RCseT7AH6eJWxxk2ol03xtP9pEHetYSPXOaJnOiD8i0= github.com/libp2p/go-libp2p-pubsub v0.10.0 h1:wS0S5FlISavMaAbxyQn3dxMOe2eegMfswM471RuHJwA= @@ -608,8 +608,8 @@ github.com/textileio/go-log/v2 v2.1.3-gke-2 h1:YkMA5ua0Cf/X6CkbexInsoJ/HdaHQBlgi github.com/textileio/go-log/v2 v2.1.3-gke-2/go.mod h1:DwACkjFS3kjZZR/4Spx3aPfSsciyslwUe5bxV8CEU2w= github.com/tidwall/btree v1.7.0 h1:L1fkJH/AuEh5zBnnBbmTwQ5Lt+bRJ5A8EWecslvo9iI= github.com/tidwall/btree v1.7.0/go.mod h1:twD9XRA5jj9VUQGELzDO4HPQTNJsoWWfYEL+EUQ2cKY= -github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= -github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= 
+github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE= +github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/valyala/fastjson v1.6.4 h1:uAUNq9Z6ymTgGhcm0UynUAB6tlbakBrz6CQFax3BXVQ= @@ -685,8 +685,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= -golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= +golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= +golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -766,8 +766,8 @@ golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= -golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= +golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= +golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -847,8 +847,8 @@ golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= diff --git 
a/playground/package-lock.json b/playground/package-lock.json index 71b67a4c40..8ff9587c64 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,15 +15,15 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.39", + "@types/react": "^18.2.41", "@types/react-dom": "^18.2.17", - "@types/swagger-ui-react": "^4.18.1", - "@typescript-eslint/eslint-plugin": "^6.12.0", + "@types/swagger-ui-react": "^4.18.3", + "@typescript-eslint/eslint-plugin": "^6.13.1", "@typescript-eslint/parser": "^6.13.0", "@vitejs/plugin-react-swc": "^3.5.0", - "eslint": "^8.54.0", + "eslint": "^8.55.0", "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.4.4", + "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.2", "vite": "^5.0.2" } @@ -488,9 +488,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.3.tgz", - "integrity": "sha512-yZzuIG+jnVu6hNSzFEN07e8BxF3uAzYtQb6uDkaYZLo6oYZDCq454c5kB8zxnzfCYyP4MIuyBn10L0DqwujTmA==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", + "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, "dependencies": { "ajv": "^6.12.4", @@ -511,9 +511,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.54.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.54.0.tgz", - "integrity": "sha512-ut5V+D+fOoWPgGGNj83GGjnntO39xDy6DWxO0wb7Jp3DcMX0TfIqdzHF85VTQkerdyGmuuMD9AKAo5KiNlf/AQ==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz", + "integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.39", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.39.tgz", - "integrity": "sha512-Oiw+ppED6IremMInLV4HXGbfbG6GyziY3kqAwJYOR0PNbkYDmLWQA3a95EhdSmamsvbkJN96ZNN+YD+fGjzSBA==", + "version": "18.2.41", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.41.tgz", + "integrity": "sha512-CwOGr/PiLiNBxEBqpJ7fO3kocP/2SSuC9fpH5K7tusrg4xPSRT/193rzolYwQnTN02We/ATXKnb6GqA5w4fRxw==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -2267,9 +2267,9 @@ "dev": true }, "node_modules/@types/swagger-ui-react": { - "version": "4.18.1", - "resolved": "https://registry.npmjs.org/@types/swagger-ui-react/-/swagger-ui-react-4.18.1.tgz", - "integrity": "sha512-nYhNi+cyN78vve1/QY5PNKYzHYlDKETtXj+gQAhuoCRB+GxGT3MVJUj8WCdwYj4vF0s1j68qkLv/66DGe5ZlnA==", + "version": "4.18.3", + "resolved": "https://registry.npmjs.org/@types/swagger-ui-react/-/swagger-ui-react-4.18.3.tgz", + "integrity": "sha512-Mo/R7IjDVwtiFPs84pWvh5pI9iyNGBjmfielxqbOh2Jv+8WVSDVe8Nu25kb5BOuV2xmGS3o33jr6nwDJMBcX+Q==", "dev": true, "dependencies": { "@types/react": "*" @@ -2294,16 +2294,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.12.0.tgz", - "integrity": "sha512-XOpZ3IyJUIV1b15M7HVOpgQxPPF7lGXgsfcEIu3yDxFPaf/xZKt7s9QO/pbk7vpWQyVulpJbu4E5LwpZiQo4kA==", + "version": "6.13.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.13.1.tgz", + "integrity": "sha512-5bQDGkXaxD46bPvQt08BUz9YSaO4S0fB1LB5JHQuXTfkGPI3+UUeS387C/e9jRie5GqT8u5kFTrMvAjtX4O5kA==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.12.0", - "@typescript-eslint/type-utils": "6.12.0", - "@typescript-eslint/utils": "6.12.0", - "@typescript-eslint/visitor-keys": "6.12.0", + "@typescript-eslint/scope-manager": "6.13.1", + "@typescript-eslint/type-utils": "6.13.1", + "@typescript-eslint/utils": "6.13.1", + "@typescript-eslint/visitor-keys": "6.13.1", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2431,13 +2431,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.12.0.tgz", - "integrity": "sha512-5gUvjg+XdSj8pcetdL9eXJzQNTl3RD7LgUiYTl8Aabdi8hFkaGSYnaS6BLc0BGNaDH+tVzVwmKtWvu0jLgWVbw==", + "version": "6.13.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.1.tgz", + "integrity": "sha512-BW0kJ7ceiKi56GbT2KKzZzN+nDxzQK2DS6x0PiSMPjciPgd/JRQGMibyaN2cPt2cAvuoH0oNvn2fwonHI+4QUQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.12.0", - "@typescript-eslint/visitor-keys": "6.12.0" + "@typescript-eslint/types": "6.13.1", + "@typescript-eslint/visitor-keys": "6.13.1" }, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2448,13 +2448,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.12.0.tgz", - "integrity": "sha512-WWmRXxhm1X8Wlquj+MhsAG4dU/Blvf1xDgGaYCzfvStP2NwPQh6KBvCDbiOEvaE0filhranjIlK/2fSTVwtBng==", + "version": "6.13.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.13.1.tgz", + "integrity": "sha512-A2qPlgpxx2v//3meMqQyB1qqTg1h1dJvzca7TugM3Yc2USDY+fsRBiojAEo92HO7f5hW5mjAUF6qobOPzlBCBQ==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.12.0", - "@typescript-eslint/utils": "6.12.0", + "@typescript-eslint/typescript-estree": "6.13.1", + "@typescript-eslint/utils": "6.13.1", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2475,9 +2475,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.12.0.tgz", - "integrity": "sha512-MA16p/+WxM5JG/F3RTpRIcuOghWO30//VEOvzubM8zuOOBYXsP+IfjoCXXiIfy2Ta8FRh9+IO9QLlaFQUU+10Q==", + "version": "6.13.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.13.1.tgz", + "integrity": "sha512-gjeEskSmiEKKFIbnhDXUyiqVma1gRCQNbVZ1C8q7Zjcxh3WZMbzWVfGE9rHfWd1msQtPS0BVD9Jz9jded44eKg==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2488,13 +2488,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.12.0.tgz", - "integrity": "sha512-vw9E2P9+3UUWzhgjyyVczLWxZ3GuQNT7QpnIY3o5OMeLO/c8oHljGc8ZpryBMIyympiAAaKgw9e5Hl9dCWFOYw==", + "version": "6.13.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.1.tgz", + "integrity": "sha512-sBLQsvOC0Q7LGcUHO5qpG1HxRgePbT6wwqOiGLpR8uOJvPJbfs0mW3jPA3ujsDvfiVwVlWUDESNXv44KtINkUQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.12.0", - 
"@typescript-eslint/visitor-keys": "6.12.0", + "@typescript-eslint/types": "6.13.1", + "@typescript-eslint/visitor-keys": "6.13.1", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2515,17 +2515,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.12.0.tgz", - "integrity": "sha512-LywPm8h3tGEbgfyjYnu3dauZ0U7R60m+miXgKcZS8c7QALO9uWJdvNoP+duKTk2XMWc7/Q3d/QiCuLN9X6SWyQ==", + "version": "6.13.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.13.1.tgz", + "integrity": "sha512-ouPn/zVoan92JgAegesTXDB/oUp6BP1v8WpfYcqh649ejNc9Qv+B4FF2Ff626kO1xg0wWwwG48lAJ4JuesgdOw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.12.0", - "@typescript-eslint/types": "6.12.0", - "@typescript-eslint/typescript-estree": "6.12.0", + "@typescript-eslint/scope-manager": "6.13.1", + "@typescript-eslint/types": "6.13.1", + "@typescript-eslint/typescript-estree": "6.13.1", "semver": "^7.5.4" }, "engines": { @@ -2540,12 +2540,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.12.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.12.0.tgz", - "integrity": "sha512-rg3BizTZHF1k3ipn8gfrzDXXSFKyOEB5zxYXInQ6z0hUvmQlhaZQzK+YmHmNViMA9HzW5Q9+bPPt90bU6GQwyw==", + "version": "6.13.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.1.tgz", + "integrity": "sha512-NDhQUy2tg6XGNBGDRm1XybOHSia8mcXmlbKWoQP+nm1BIIMxa55shyJfZkHpEBN62KNPLrocSM2PdPcaLgDKMQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.12.0", + "@typescript-eslint/types": "6.13.1", "eslint-visitor-keys": "^3.4.1" }, "engines": { @@ -3186,15 +3186,15 @@ } }, "node_modules/eslint": { - "version": "8.54.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.54.0.tgz", - "integrity": "sha512-NY0DfAkM8BIZDVl6PgSa1ttZbx3xHgJzSNJKYcQglem6CppHyMhRIQkBVSSMaSRnLhig3jsDbEzOjwCVt4AmmA==", + "version": "8.55.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz", + "integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", - "@eslint/eslintrc": "^2.1.3", - "@eslint/js": "8.54.0", + "@eslint/eslintrc": "^2.1.4", + "@eslint/js": "8.55.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -3253,9 +3253,9 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.4.tgz", - "integrity": "sha512-eD83+65e8YPVg6603Om2iCIwcQJf/y7++MWm4tACtEswFLYMwxwVWAfwN+e19f5Ad/FOyyNg9Dfi5lXhH3Y3rA==", + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.5.tgz", + "integrity": "sha512-D53FYKJa+fDmZMtriODxvhwrO+IOqrxoEo21gMA0sjHdU6dPVH4OhyFip9ypl8HOF5RV5KdTo+rBQLvnY2cO8w==", "dev": true, "peerDependencies": { "eslint": ">=7" diff --git a/playground/package.json b/playground/package.json index 1b406565ce..949a932f25 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,15 +17,15 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - 
"@types/react": "^18.2.39", + "@types/react": "^18.2.41", "@types/react-dom": "^18.2.17", - "@types/swagger-ui-react": "^4.18.1", - "@typescript-eslint/eslint-plugin": "^6.12.0", + "@types/swagger-ui-react": "^4.18.3", + "@typescript-eslint/eslint-plugin": "^6.13.1", "@typescript-eslint/parser": "^6.13.0", "@vitejs/plugin-react-swc": "^3.5.0", - "eslint": "^8.54.0", + "eslint": "^8.55.0", "eslint-plugin-react-hooks": "^4.6.0", - "eslint-plugin-react-refresh": "^0.4.4", + "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.2", "vite": "^5.0.2" } From 851e6c44a83c1928c1e3cbb4d91fa722b6796975 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Dec 2023 16:50:40 -0500 Subject: [PATCH 14/60] bot: Bump @types/react from 18.2.41 to 18.2.42 in /playground (#2108) Bumps [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) from 18.2.41 to 18.2.42.
[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@types/react&package-manager=npm_and_yarn&previous-version=18.2.41&new-version=18.2.42)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 8ff9587c64..4e178dbc21 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,7 +15,7 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.41", + "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.1", @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.41", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.41.tgz", - "integrity": "sha512-CwOGr/PiLiNBxEBqpJ7fO3kocP/2SSuC9fpH5K7tusrg4xPSRT/193rzolYwQnTN02We/ATXKnb6GqA5w4fRxw==", + "version": "18.2.42", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.42.tgz", + "integrity": "sha512-c1zEr96MjakLYus/wPnuWDo1/zErfdU9rNsIGmE+NV71nx88FG9Ttgo5dqorXTu/LImX2f63WBP986gJkMPNbA==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", diff --git a/playground/package.json b/playground/package.json index 949a932f25..ddd19ae28c 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,7 +17,7 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.41", + "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.1", From ca327d4fef02523195bbd16f5e5e2e414d2a21fa Mon Sep 17 00:00:00 2001 From: John-Alan Simmons Date: Tue, 5 Dec 2023 09:37:42 -0500 Subject: [PATCH 15/60] chore(i): Updated discord links in docs and readme --- .goreleaser.yaml | 2 +- CHANGELOG.md | 14 +++++++------- CONTRIBUTING.md | 2 +- README.md | 4 ++-- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.goreleaser.yaml b/.goreleaser.yaml index 4304075c08..87c1561a22 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -54,7 +54,7 @@ release: header: | DefraDB v{{ .Major }}.{{ .Minor }} is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. - To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v{{ .Major }}.{{ .Minor }}.x databases. If you need help migrating an existing deployment, reach out at hello@source.network or join our Discord at https://discord.source.network/. + To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v{{ .Major }}.{{ .Minor }}.x databases. If you need help migrating an existing deployment, reach out at hello@source.network or join our Discord at https://discord.gg/w7jYQVJ/. name_template: "v{{ .Version }} Release" changelog: diff --git a/CHANGELOG.md b/CHANGELOG.md index b42673927b..e5bcd6cfbe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ DefraDB v0.8 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. 
-To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.7.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.7.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. ### Features @@ -81,7 +81,7 @@ DefraDB v0.7 is a major pre-production release. Until the stable version 1.0 is This release has focused on robustness, testing, and schema management. Some highlight new features include notable expansions to the expressiveness of schema migrations. -To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.6.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.6.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. ### Features @@ -154,7 +154,7 @@ DefraDB v0.6 is a major pre-production release. Until the stable version 1.0 is There are several new and powerful features, important bug fixes, and notable refactors in this release. Some highlight features include: The initial release of our LensVM based schema migration engine powered by WebAssembly ([#1650](https://github.com/sourcenetwork/defradb/issues/1650)), newly embedded DefraDB Playround which includes a bundled GraphQL client and schema manager, and last but not least a relation field (_id) alias to improve the developer experience ([#1609](https://github.com/sourcenetwork/defradb/issues/1609)). -To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.5.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.5.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. ### Features @@ -329,7 +329,7 @@ There many new features in this release, but most importantly, this is the first To get a full outline of the changes, we invite you to review the official changelog below. Some highlights are the first iteration of our schema update system, allowing developers to add new fields to schemas using our JSON Patch based DDL, a new DAG based delete system which will persist "soft-delete" ops into the CRDT Merkle DAG, and a early prototype for our collection level peer-to-peer synchronization. -This release does include a Breaking Change to existing v0.4.x databases. 
If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. +This release does include a Breaking Change to existing v0.4.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. ### Features @@ -469,7 +469,7 @@ DefraDB v0.4 is a major pre-production release. Until the stable version 1.0 is There are various new features in this release - some of which are breaking - and we invite you to review the official changelog below. Some highlights are persistence of replicators, DateTime scalars, TLS support, and GQL subscriptions. -This release does include a Breaking Change to existing v0.3.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. +This release does include a Breaking Change to existing v0.3.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. ### Features @@ -642,7 +642,7 @@ DefraDB v0.3 is a major pre-production release. Until the stable version 1.0 is There are *several* new features in this release, and we invite you to review the official changelog below. Some highlights are various new features for Grouping & Aggregation for the query system, like top-level aggregation and group filtering. Moreover, a brand new Query Explain system was added to introspect the execution plans created by DefraDB. Lastly we introduced a revamped CLI configuration system. -This release does include a Breaking Change to existing v0.2.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.source.network/. +This release does include a Breaking Change to existing v0.2.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. ### Features @@ -877,7 +877,7 @@ This release is jam-packed with new features and a small number of breaking chan Much more than just that has been added to ensure we're building reliable software expected of any database, such as expanded test & benchmark suites, automated bug detection, performance gains, and more. -This release does include a Breaking Change to existing v0.1 databases regarding the internal data model, which affects the "Content Identifiers" we use to generate DocKeys and VersionIDs. If you need help migrating an existing deployment, reach out at hello@source.network or join our Discord at https://discord.source.network. +This release does include a Breaking Change to existing v0.1 databases regarding the internal data model, which affects the "Content Identifiers" we use to generate DocKeys and VersionIDs. If you need help migrating an existing deployment, reach out at hello@source.network or join our Discord at https://discord.gg/w7jYQVJ. ### Features diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c7cfb9b590..c590fa54f4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -5,7 +5,7 @@ This document will guide you through the process of contributing to the project. 
All contributions are appreciated, whether it's identifying problems, highlighting missing features, or contributing to the codebase in simple or complex ways. -You are encouraged to join the [Source Network Discord](discord.source.network) to discuss ideas, ask questions, and find inspiration for future developments. +You are encouraged to join the [Source Network Discord](discord.gg/w7jYQVJ) to discuss ideas, ask questions, and find inspiration for future developments. ## Getting started To get started, clone the repository, build, and run it: diff --git a/README.md b/README.md index d77dd18f48..8ae2ebfb44 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ ![Tests Workflow](https://github.com/sourcenetwork/defradb/actions/workflows/test-and-upload-coverage.yml/badge.svg) [![Go Report Card](https://goreportcard.com/badge/github.com/sourcenetwork/defradb)](https://goreportcard.com/report/github.com/sourcenetwork/defradb) [![codecov](https://codecov.io/gh/sourcenetwork/defradb/branch/develop/graph/badge.svg?token=RHAORX13PA)](https://codecov.io/gh/sourcenetwork/defradb) -[![Discord](https://img.shields.io/discord/427944769851752448.svg?color=768AD4&label=discord&logo=https%3A%2F%2Fdiscordapp.com%2Fassets%2F8c9701b98ad4372b58f13fd9f65f966e.svg)](https://discord.source.network/) +[![Discord](https://img.shields.io/discord/427944769851752448.svg?color=768AD4&label=discord&logo=https%3A%2F%2Fdiscordapp.com%2Fassets%2F8c9701b98ad4372b58f13fd9f65f966e.svg)](https://discord.gg/w7jYQVJ) [![Twitter Follow](https://img.shields.io/twitter/follow/sourcenetwrk.svg?label=&style=social)](https://twitter.com/sourcenetwrk)

@@ -432,7 +432,7 @@ defradb client backup import path/to/backup.json ## Community -Discuss on [Discord](https://discord.source.network/) or [Github Discussions](https://github.com/sourcenetwork/defradb/discussions). The Source project is on [Twitter](https://twitter.com/sourcenetwrk). +Discuss on [Discord](https://discord.gg/w7jYQVJ) or [Github Discussions](https://github.com/sourcenetwork/defradb/discussions). The Source project is on [Twitter](https://twitter.com/sourcenetwrk). ## Licensing From 51b20bbc15071da28808159202450c2a778d749b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 06:51:34 -0500 Subject: [PATCH 16/60] bot: Bump vite from 5.0.2 to 5.0.5 in /playground (#2112) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 5.0.2 to 5.0.5.

Changelog

Sourced from vite's changelog.

- 5.0.5 (2023-12-04)
- 5.0.4 (2023-11-29)
- 5.0.3 (2023-11-28)

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=vite&package-manager=npm_and_yarn&previous-version=5.0.2&new-version=5.0.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)

You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/sourcenetwork/defradb/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 24 ++++++++++++------------ playground/package.json | 2 +- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 4e178dbc21..76a32516fd 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -25,7 +25,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.2", - "vite": "^5.0.2" + "vite": "^5.0.5" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -4400,9 +4400,9 @@ "optional": true }, "node_modules/nanoid": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", - "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", "dev": true, "funding": [ { @@ -4719,9 +4719,9 @@ } }, "node_modules/postcss": { - "version": "8.4.31", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", - "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", + "version": "8.4.32", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.32.tgz", + "integrity": "sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==", "dev": true, "funding": [ { @@ -4738,7 +4738,7 @@ } ], "dependencies": { - "nanoid": "^3.3.6", + "nanoid": "^3.3.7", "picocolors": "^1.0.0", "source-map-js": "^1.0.2" }, @@ -5984,13 +5984,13 @@ "optional": true }, "node_modules/vite": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.2.tgz", - "integrity": "sha512-6CCq1CAJCNM1ya2ZZA7+jS2KgnhbzvxakmlIjN24cF/PXhRMzpM/z8QgsVJA/Dm5fWUWnVEsmtBoMhmerPxT0g==", + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.5.tgz", + "integrity": "sha512-OekeWqR9Ls56f3zd4CaxzbbS11gqYkEiBtnWFFgYR2WV8oPJRRKq0mpskYy/XaoCL3L7VINDhqqOMNDiYdGvGg==", "dev": true, "dependencies": { "esbuild": "^0.19.3", - "postcss": "^8.4.31", + "postcss": "^8.4.32", "rollup": "^4.2.0" }, "bin": { diff --git a/playground/package.json b/playground/package.json index ddd19ae28c..7a6315b02a 100644 --- a/playground/package.json +++ b/playground/package.json @@ -27,6 +27,6 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.2", - "vite": "^5.0.2" + "vite": "^5.0.5" } } From 9084218a5047c8db5d77974161b08a7fde912f5a Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Thu, 7 Dec 2023 09:12:23 -0800 Subject: [PATCH 17/60] feat: Add blob scalar type (#2091) ## Relevant issue(s) Resolves #2090 ## Description This PR adds a blob scalar type to the schema system. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). 
- [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Added integration tests. Specify the platform(s) on which this was tested: - MacOS --- client/descriptions.go | 5 +- db/collection_update.go | 3 + db/index.go | 9 ++ db/indexed_docs_test.go | 3 + request/graphql/schema/collection.go | 3 + request/graphql/schema/descriptions.go | 6 +- request/graphql/schema/manager.go | 3 + request/graphql/schema/types/scalars.go | 65 +++++++++ request/graphql/schema/types/scalars_test.go | 88 +++++++++++ .../mutation/update/field_kinds/blob_test.go | 60 ++++++++ tests/integration/schema/simple_test.go | 46 ++++++ .../updates/add/field/kind/blob_test.go | 137 ++++++++++++++++++ .../updates/add/field/kind/invalid_test.go | 24 --- 13 files changed, 426 insertions(+), 26 deletions(-) create mode 100644 request/graphql/schema/types/scalars.go create mode 100644 request/graphql/schema/types/scalars_test.go create mode 100644 tests/integration/mutation/update/field_kinds/blob_test.go create mode 100644 tests/integration/schema/updates/add/field/kind/blob_test.go diff --git a/client/descriptions.go b/client/descriptions.go index f9a262e438..96f68108b4 100644 --- a/client/descriptions.go +++ b/client/descriptions.go @@ -146,6 +146,8 @@ func (f FieldKind) String() string { return "[String]" case FieldKind_STRING_ARRAY: return "[String!]" + case FieldKind_BLOB: + return "Blob" default: return fmt.Sprint(uint8(f)) } @@ -166,7 +168,7 @@ const ( FieldKind_DATETIME FieldKind = 10 FieldKind_STRING FieldKind = 11 FieldKind_STRING_ARRAY FieldKind = 12 - _ FieldKind = 13 // safe to repurpose (was never used) + FieldKind_BLOB FieldKind = 13 _ FieldKind = 14 // safe to repurpose (was never used) _ FieldKind = 15 // safe to repurpose (was never used) @@ -204,6 +206,7 @@ var FieldKindStringToEnumMapping = map[string]FieldKind{ "String": FieldKind_STRING, "[String]": FieldKind_NILLABLE_STRING_ARRAY, "[String!]": FieldKind_STRING_ARRAY, + "Blob": FieldKind_BLOB, } // RelationType describes the type of relation between two types. 
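
For readers skimming the diff, here is a minimal, self-contained Go sketch of the coercion rule this patch gives the new `Blob` scalar: byte slices are encoded to a hex string, strings must already be valid hex, and anything else is rejected. The `hexPattern` and `coerce` names below are illustrative stand-ins for the `BlobPattern`/`coerceBlob` pair introduced later in this patch, not the exact exported surface.

```go
package main

import (
	"encoding/hex"
	"fmt"
	"regexp"
)

// hexPattern accepts one or more hex digits, matching the validation
// rule the Blob scalar applies to incoming string values.
var hexPattern = regexp.MustCompile("^[0-9a-fA-F]+$")

// coerce normalizes a candidate Blob value to a hex string,
// returning nil when the value cannot be represented as one.
func coerce(value any) any {
	switch v := value.(type) {
	case []byte:
		// Byte slices are always representable; encode them as hex.
		return hex.EncodeToString(v)
	case string:
		// Strings must already be valid hex; reject anything else.
		if !hexPattern.MatchString(v) {
			return nil
		}
		return v
	default:
		// Ints, bools, nil, etc. have no Blob representation.
		return nil
	}
}

func main() {
	fmt.Println(coerce([]byte{0, 255})) // 00ff
	fmt.Println(coerce("00FF"))         // 00FF
	fmt.Println(coerce("not-hex"))      // <nil>
}
```

Note that, as in the patch, string input is validated but not re-encoded, so mixed-case hex such as `00FF` is stored as given.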
diff --git a/db/collection_update.go b/db/collection_update.go index c68902db44..e6dbc4617a 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -470,6 +470,9 @@ func validateFieldSchema(val *fastjson.Value, field client.FieldDescription) (an case client.FieldKind_FOREIGN_OBJECT, client.FieldKind_FOREIGN_OBJECT_ARRAY: return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) + + case client.FieldKind_BLOB: + return getString(val) } return nil, client.NewErrUnhandledType("FieldKind", field.Kind) diff --git a/db/index.go b/db/index.go index ce9e55f519..5d43bddb21 100644 --- a/db/index.go +++ b/db/index.go @@ -18,6 +18,7 @@ import ( "github.com/sourcenetwork/defradb/core" "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/errors" + "github.com/sourcenetwork/defradb/request/graphql/schema/types" ) // CollectionIndex is an interface for collection indexes @@ -51,6 +52,14 @@ func getValidateIndexFieldFunc(kind client.FieldKind) func(any) bool { return canConvertIndexFieldValue[float64] case client.FieldKind_BOOL: return canConvertIndexFieldValue[bool] + case client.FieldKind_BLOB: + return func(val any) bool { + blobStrVal, ok := val.(string) + if !ok { + return false + } + return types.BlobPattern.MatchString(blobStrVal) + } case client.FieldKind_DATETIME: return func(val any) bool { timeStrVal, ok := val.(string) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index f1f8d6270f..bb569bdc6c 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -421,6 +421,8 @@ func TestNonUnique_StoringIndexedFieldValueOfDifferentTypes(t *testing.T) { {Name: "invalid bool", FieldKind: client.FieldKind_BOOL, FieldVal: "invalid", ShouldFail: true}, {Name: "invalid datetime", FieldKind: client.FieldKind_DATETIME, FieldVal: nowStr[1:], ShouldFail: true}, {Name: "invalid datetime type", FieldKind: client.FieldKind_DATETIME, FieldVal: 1, ShouldFail: true}, + {Name: "invalid blob", FieldKind: client.FieldKind_BLOB, FieldVal: "invalid", ShouldFail: true}, + {Name: "invalid blob type", FieldKind: client.FieldKind_BLOB, FieldVal: 1, ShouldFail: true}, {Name: "valid int", FieldKind: client.FieldKind_INT, FieldVal: 12}, {Name: "valid float", FieldKind: client.FieldKind_FLOAT, FieldVal: 36.654}, @@ -428,6 +430,7 @@ func TestNonUnique_StoringIndexedFieldValueOfDifferentTypes(t *testing.T) { {Name: "valid bool false", FieldKind: client.FieldKind_BOOL, FieldVal: false}, {Name: "valid datetime string", FieldKind: client.FieldKind_DATETIME, FieldVal: nowStr}, {Name: "valid empty string", FieldKind: client.FieldKind_STRING, FieldVal: ""}, + {Name: "valid blob type", FieldKind: client.FieldKind_BLOB, FieldVal: "00ff"}, } for i, tc := range testCase { diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index 15a6283acb..ed77a9d614 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -331,6 +331,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { typeFloat string = "Float" typeDateTime string = "DateTime" typeString string = "String" + typeBlob string = "Blob" ) switch astTypeVal := t.(type) { @@ -379,6 +380,8 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { return client.FieldKind_DATETIME, nil case typeString: return client.FieldKind_STRING, nil + case typeBlob: + return client.FieldKind_BLOB, nil default: return client.FieldKind_FOREIGN_OBJECT, nil } diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index 
1aabee729e..f267ae8ed0 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -14,6 +14,7 @@ import ( gql "github.com/sourcenetwork/graphql-go" "github.com/sourcenetwork/defradb/client" + schemaTypes "github.com/sourcenetwork/defradb/request/graphql/schema/types" ) var ( @@ -31,9 +32,10 @@ var ( gql.String: client.FieldKind_STRING, &gql.Object{}: client.FieldKind_FOREIGN_OBJECT, &gql.List{}: client.FieldKind_FOREIGN_OBJECT_ARRAY, + // Custom scalars + schemaTypes.BlobScalarType: client.FieldKind_BLOB, // More custom ones to come // - JSON - // - ByteArray // - Counters } @@ -52,6 +54,7 @@ var ( client.FieldKind_STRING: gql.String, client.FieldKind_STRING_ARRAY: gql.NewList(gql.NewNonNull(gql.String)), client.FieldKind_NILLABLE_STRING_ARRAY: gql.NewList(gql.String), + client.FieldKind_BLOB: schemaTypes.BlobScalarType, } // This map is fine to use @@ -70,6 +73,7 @@ var ( client.FieldKind_STRING: client.LWW_REGISTER, client.FieldKind_STRING_ARRAY: client.LWW_REGISTER, client.FieldKind_NILLABLE_STRING_ARRAY: client.LWW_REGISTER, + client.FieldKind_BLOB: client.LWW_REGISTER, client.FieldKind_FOREIGN_OBJECT: client.NONE_CRDT, client.FieldKind_FOREIGN_OBJECT_ARRAY: client.NONE_CRDT, } diff --git a/request/graphql/schema/manager.go b/request/graphql/schema/manager.go index 0f96ec2a29..f44b770fcb 100644 --- a/request/graphql/schema/manager.go +++ b/request/graphql/schema/manager.go @@ -141,6 +141,9 @@ func defaultTypes() []gql.Type { gql.Int, gql.String, + // Custom Scalar types + schemaTypes.BlobScalarType, + // Base Query types // Sort/Order enum diff --git a/request/graphql/schema/types/scalars.go b/request/graphql/schema/types/scalars.go new file mode 100644 index 0000000000..a0e9dca369 --- /dev/null +++ b/request/graphql/schema/types/scalars.go @@ -0,0 +1,65 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package types + +import ( + "encoding/hex" + "regexp" + + "github.com/sourcenetwork/graphql-go" + "github.com/sourcenetwork/graphql-go/language/ast" +) + +// BlobPattern is a regex for validating blob hex strings +var BlobPattern = regexp.MustCompile("^[0-9a-fA-F]+$") + +// coerceBlob converts the given value into a valid hex string. +// If the value cannot be converted nil is returned. 
+func coerceBlob(value any) any { + switch value := value.(type) { + case []byte: + return hex.EncodeToString(value) + + case *[]byte: + return coerceBlob(*value) + + case string: + if !BlobPattern.MatchString(value) { + return nil + } + return value + + case *string: + return coerceBlob(*value) + + default: + return nil + } +} + +var BlobScalarType = graphql.NewScalar(graphql.ScalarConfig{ + Name: "Blob", + Description: "The `Blob` scalar type represents a binary large object.", + // Serialize converts the value to a hex string + Serialize: coerceBlob, + // ParseValue converts the value to a hex string + ParseValue: coerceBlob, + // ParseLiteral converts the ast value to a hex string + ParseLiteral: func(valueAST ast.Value) any { + switch valueAST := valueAST.(type) { + case *ast.StringValue: + return coerceBlob(valueAST.Value) + default: + // return nil if the value cannot be parsed + return nil + } + }, +}) diff --git a/request/graphql/schema/types/scalars_test.go b/request/graphql/schema/types/scalars_test.go new file mode 100644 index 0000000000..5126f2e6a2 --- /dev/null +++ b/request/graphql/schema/types/scalars_test.go @@ -0,0 +1,88 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package types + +import ( + "testing" + + "github.com/sourcenetwork/graphql-go/language/ast" + "github.com/stretchr/testify/assert" +) + +func TestBlobScalarTypeSerialize(t *testing.T) { + stringInput := "00ff" + bytesInput := []byte{0, 255} + + cases := []struct { + input any + expect any + }{ + {stringInput, "00ff"}, + {&stringInput, "00ff"}, + {bytesInput, "00ff"}, + {&bytesInput, "00ff"}, + {nil, nil}, + {0, nil}, + {false, nil}, + } + for _, c := range cases { + result := BlobScalarType.Serialize(c.input) + assert.Equal(t, c.expect, result) + } +} + +func TestBlobScalarTypeParseValue(t *testing.T) { + stringInput := "00ff" + bytesInput := []byte{0, 255} + // invalid string containing non-hex characters + invalidHexString := "!@#$%^&*" + + cases := []struct { + input any + expect any + }{ + {stringInput, "00ff"}, + {&stringInput, "00ff"}, + {bytesInput, "00ff"}, + {&bytesInput, "00ff"}, + {invalidHexString, nil}, + {&invalidHexString, nil}, + {nil, nil}, + {0, nil}, + {false, nil}, + } + for _, c := range cases { + result := BlobScalarType.ParseValue(c.input) + assert.Equal(t, c.expect, result) + } +} + +func TestBlobScalarTypeParseLiteral(t *testing.T) { + cases := []struct { + input ast.Value + expect any + }{ + {&ast.StringValue{Value: "00ff"}, "00ff"}, + {&ast.StringValue{Value: "00!@#$%^&*"}, nil}, + {&ast.StringValue{Value: "!@#$%^&*00"}, nil}, + {&ast.IntValue{}, nil}, + {&ast.BooleanValue{}, nil}, + {&ast.NullValue{}, nil}, + {&ast.EnumValue{}, nil}, + {&ast.FloatValue{}, nil}, + {&ast.ListValue{}, nil}, + {&ast.ObjectValue{}, nil}, + } + for _, c := range cases { + result := BlobScalarType.ParseLiteral(c.input) + assert.Equal(t, c.expect, result) + } +} diff --git a/tests/integration/mutation/update/field_kinds/blob_test.go b/tests/integration/mutation/update/field_kinds/blob_test.go new file mode 100644 index 0000000000..4445c45bba --- /dev/null +++ b/tests/integration/mutation/update/field_kinds/blob_test.go @@ -0,0 +1,60 @@ +// Copyright 2023 
Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package field_kinds + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestMutationUpdate_WithBlobField(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple update of blob field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + data: Blob + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "data": "00FE" + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "data": "00FF" + }`, + }, + testUtils.Request{ + Request: ` + query { + Users { + data + } + } + `, + Results: []map[string]any{ + { + "data": "00FF", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index 6bcb2a1dec..9fa0eb021a 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -271,3 +271,49 @@ func TestSchemaSimpleErrorsGivenNonNullManyRelationField(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestSchemaSimpleCreatesSchemaGivenTypeWithBlobField(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + data: Blob + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "Users") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "Users", + "fields": DefaultFields.Append( + Field{ + "name": "data", + "type": map[string]any{ + "kind": "SCALAR", + "name": "Blob", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/kind/blob_test.go b/tests/integration/schema/updates/add/field/kind/blob_test.go new file mode 100644 index 0000000000..badbdc56fe --- /dev/null +++ b/tests/integration/schema/updates/add/field/kind/blob_test.go @@ -0,0 +1,137 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package kind + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaUpdatesAddFieldKindBlob(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind blob (13)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 13} } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindBlobWithCreate(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind blob (13) with create", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 13} } + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "foo": "00ff" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo + } + }`, + Results: []map[string]any{ + { + "name": "John", + "foo": "00ff", + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdatesAddFieldKindBlobSubstitutionWithCreate(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with kind blob substitution with create", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": "Blob"} } + ] + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "John", + "foo": "00ff" + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo + } + }`, + Results: []map[string]any{ + { + "name": "John", + "foo": "00ff", + }, + }, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/kind/invalid_test.go b/tests/integration/schema/updates/add/field/kind/invalid_test.go index 5e578e5307..98f026ecc2 100644 --- a/tests/integration/schema/updates/add/field/kind/invalid_test.go +++ b/tests/integration/schema/updates/add/field/kind/invalid_test.go @@ -64,30 +64,6 @@ func TestSchemaUpdatesAddFieldKind9(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKind13(t *testing.T) { - test := testUtils.TestCase{ - Description: "Test schema update, add field with kind deprecated (13)", - Actions: []any{ - testUtils.SchemaUpdate{ - Schema: ` - type Users { - name: String - } - `, - }, - testUtils.SchemaPatch{ - Patch: ` - [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 13} } - ] - `, - ExpectedError: "no type found for given name. 
Type: 13", - }, - }, - } - testUtils.ExecuteTestCase(t, test) -} - func TestSchemaUpdatesAddFieldKind14(t *testing.T) { test := testUtils.TestCase{ Description: "Test schema update, add field with kind deprecated (14)", From eda5b6bd41e906a1adbfb3d9d81aac48deed0cd7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Dec 2023 14:27:32 -0500 Subject: [PATCH 18/60] bot: Bump @typescript-eslint/eslint-plugin from 6.13.1 to 6.13.2 in /playground (#2109) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) from 6.13.1 to 6.13.2.
Release notes

Sourced from @typescript-eslint/eslint-plugin's releases.

v6.13.2

6.13.2 (2023-12-04)

Note: Version bump only for package @typescript-eslint/typescript-eslint

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/eslint-plugin's changelog.

6.13.2 (2023-12-04)

Note: Version bump only for package @typescript-eslint/eslint-plugin

Commits

- cc2c6d3 chore: publish v6.13.2
- 37f34f4 docs: add 'When Not To Use It' or an intentional omission notice on all rules...
- 96abf18 chore: finish enabling no-unnecessary-condition internally (#8004)
- 4dae083 chore: prefix all nx package scripts with npx (#7988)
- See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/eslint-plugin&package-manager=npm_and_yarn&previous-version=6.13.1&new-version=6.13.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)

Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.

---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 72 ++++++++++++++++++------------------ playground/package.json | 2 +- 2 files changed, 37 insertions(+), 37 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 76a32516fd..3dc2a67400 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,7 +18,7 @@ "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.13.1", + "@typescript-eslint/eslint-plugin": "^6.13.2", "@typescript-eslint/parser": "^6.13.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.55.0", @@ -2294,16 +2294,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.13.1.tgz", - "integrity": "sha512-5bQDGkXaxD46bPvQt08BUz9YSaO4S0fB1LB5JHQuXTfkGPI3+UUeS387C/e9jRie5GqT8u5kFTrMvAjtX4O5kA==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.13.2.tgz", + "integrity": "sha512-3+9OGAWHhk4O1LlcwLBONbdXsAhLjyCFogJY/cWy2lxdVJ2JrcTF2pTGMaLl2AE7U1l31n8Py4a8bx5DLf/0dQ==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.13.1", - "@typescript-eslint/type-utils": "6.13.1", - "@typescript-eslint/utils": "6.13.1", - "@typescript-eslint/visitor-keys": "6.13.1", + "@typescript-eslint/scope-manager": "6.13.2", + "@typescript-eslint/type-utils": "6.13.2", + "@typescript-eslint/utils": "6.13.2", + "@typescript-eslint/visitor-keys": "6.13.2", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2431,13 +2431,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.1.tgz", - "integrity": "sha512-BW0kJ7ceiKi56GbT2KKzZzN+nDxzQK2DS6x0PiSMPjciPgd/JRQGMibyaN2cPt2cAvuoH0oNvn2fwonHI+4QUQ==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.2.tgz", + "integrity": "sha512-CXQA0xo7z6x13FeDYCgBkjWzNqzBn8RXaE3QVQVIUm74fWJLkJkaHmHdKStrxQllGh6Q4eUGyNpMe0b1hMkXFA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.13.1", - "@typescript-eslint/visitor-keys": "6.13.1" + "@typescript-eslint/types": "6.13.2", + "@typescript-eslint/visitor-keys": "6.13.2" }, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2448,13 +2448,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.13.1.tgz", - "integrity": "sha512-A2qPlgpxx2v//3meMqQyB1qqTg1h1dJvzca7TugM3Yc2USDY+fsRBiojAEo92HO7f5hW5mjAUF6qobOPzlBCBQ==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.13.2.tgz", + "integrity": "sha512-Qr6ssS1GFongzH2qfnWKkAQmMUyZSyOr0W54nZNU1MDfo+U4Mv3XveeLZzadc/yq8iYhQZHYT+eoXJqnACM1tw==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.13.1", - "@typescript-eslint/utils": "6.13.1", + "@typescript-eslint/typescript-estree": "6.13.2", + "@typescript-eslint/utils": "6.13.2", "debug": "^4.3.4", 
"ts-api-utils": "^1.0.1" }, @@ -2475,9 +2475,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.13.1.tgz", - "integrity": "sha512-gjeEskSmiEKKFIbnhDXUyiqVma1gRCQNbVZ1C8q7Zjcxh3WZMbzWVfGE9rHfWd1msQtPS0BVD9Jz9jded44eKg==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.13.2.tgz", + "integrity": "sha512-7sxbQ+EMRubQc3wTfTsycgYpSujyVbI1xw+3UMRUcrhSy+pN09y/lWzeKDbvhoqcRbHdc+APLs/PWYi/cisLPg==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2488,13 +2488,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.1.tgz", - "integrity": "sha512-sBLQsvOC0Q7LGcUHO5qpG1HxRgePbT6wwqOiGLpR8uOJvPJbfs0mW3jPA3ujsDvfiVwVlWUDESNXv44KtINkUQ==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.2.tgz", + "integrity": "sha512-SuD8YLQv6WHnOEtKv8D6HZUzOub855cfPnPMKvdM/Bh1plv1f7Q/0iFUDLKKlxHcEstQnaUU4QZskgQq74t+3w==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.13.1", - "@typescript-eslint/visitor-keys": "6.13.1", + "@typescript-eslint/types": "6.13.2", + "@typescript-eslint/visitor-keys": "6.13.2", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2515,17 +2515,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.13.1.tgz", - "integrity": "sha512-ouPn/zVoan92JgAegesTXDB/oUp6BP1v8WpfYcqh649ejNc9Qv+B4FF2Ff626kO1xg0wWwwG48lAJ4JuesgdOw==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.13.2.tgz", + "integrity": "sha512-b9Ptq4eAZUym4idijCRzl61oPCwwREcfDI8xGk751Vhzig5fFZR9CyzDz4Sp/nxSLBYxUPyh4QdIDqWykFhNmQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.13.1", - "@typescript-eslint/types": "6.13.1", - "@typescript-eslint/typescript-estree": "6.13.1", + "@typescript-eslint/scope-manager": "6.13.2", + "@typescript-eslint/types": "6.13.2", + "@typescript-eslint/typescript-estree": "6.13.2", "semver": "^7.5.4" }, "engines": { @@ -2540,12 +2540,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.13.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.1.tgz", - "integrity": "sha512-NDhQUy2tg6XGNBGDRm1XybOHSia8mcXmlbKWoQP+nm1BIIMxa55shyJfZkHpEBN62KNPLrocSM2PdPcaLgDKMQ==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.2.tgz", + "integrity": "sha512-OGznFs0eAQXJsp+xSd6k/O1UbFi/K/L7WjqeRoFE7vadjAF9y0uppXhYNQNEqygjou782maGClOoZwPqF0Drlw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.13.1", + "@typescript-eslint/types": "6.13.2", "eslint-visitor-keys": "^3.4.1" }, "engines": { diff --git a/playground/package.json b/playground/package.json index 7a6315b02a..fa7d5a2f90 100644 --- a/playground/package.json +++ b/playground/package.json @@ -20,7 +20,7 @@ "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.13.1", + "@typescript-eslint/eslint-plugin": "^6.13.2", "@typescript-eslint/parser": "^6.13.0", 
"@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.55.0", From 48d0c2496a22c5f48207867fcff741303090f4d6 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Thu, 7 Dec 2023 21:20:47 -0500 Subject: [PATCH 19/60] refactor: Simplify Merkle CRDT workflow (#2111) ## Relevant issue(s) Resolves #2110 Possibly Resolves #917 ## Description This PR aims to simplify the CRDT packages ahead of the new CRDT types. --- client/errors.go | 9 ++ core/crdt/base.go | 21 ++- core/crdt/base_test.go | 4 +- core/crdt/composite.go | 35 +---- core/crdt/lwwreg.go | 40 +----- core/replicated.go | 6 - db/collection.go | 102 +++++--------- db/collection_delete.go | 3 +- db/db.go | 6 - db/errors.go | 1 - db/fetcher/versioned.go | 10 +- merkle/crdt/composite.go | 65 ++------- merkle/crdt/errors.go | 19 --- merkle/crdt/factory.go | 162 ---------------------- merkle/crdt/factory_test.go | 238 --------------------------------- merkle/crdt/lwwreg.go | 63 +-------- merkle/crdt/merklecrdt.go | 55 +++++--- merkle/crdt/merklecrdt_test.go | 4 +- net/process.go | 38 +++--- 19 files changed, 146 insertions(+), 735 deletions(-) delete mode 100644 merkle/crdt/errors.go delete mode 100644 merkle/crdt/factory.go delete mode 100644 merkle/crdt/factory_test.go diff --git a/client/errors.go b/client/errors.go index 048d96c00d..28161c502d 100644 --- a/client/errors.go +++ b/client/errors.go @@ -24,6 +24,7 @@ const ( errMaxTxnRetries string = "reached maximum transaction reties" errRelationOneSided string = "relation must be defined on both schemas" errCollectionNotFound string = "collection not found" + errUnknownCRDT string = "unknown crdt" ) // Errors returnable from this package. @@ -47,6 +48,7 @@ var ( ErrMaxTxnRetries = errors.New(errMaxTxnRetries) ErrRelationOneSided = errors.New(errRelationOneSided) ErrCollectionNotFound = errors.New(errCollectionNotFound) + ErrUnknownCRDT = errors.New(errUnknownCRDT) ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. @@ -123,3 +125,10 @@ func NewErrCollectionNotFoundForSchema(schemaRoot string) error { errors.NewKV("SchemaRoot", schemaRoot), ) } + +func NewErrUnknownCRDT(cType CType) error { + return errors.New( + errUnknownCRDT, + errors.NewKV("Type", cType), + ) +} diff --git a/core/crdt/base.go b/core/crdt/base.go index d24b263645..a0d8b5375f 100644 --- a/core/crdt/base.go +++ b/core/crdt/base.go @@ -28,13 +28,26 @@ import ( type baseCRDT struct { store datastore.DSReaderWriter key core.DataStoreKey + + // schemaVersionKey is the schema version datastore key at the time of commit. + // + // It can be used to identify the collection datastructure state at the time of commit. 
+ schemaVersionKey core.CollectionSchemaVersionKey + + fieldName string } -// @TODO paramaterize ns/suffix -func newBaseCRDT(store datastore.DSReaderWriter, key core.DataStoreKey) baseCRDT { +func newBaseCRDT( + store datastore.DSReaderWriter, + key core.DataStoreKey, + schemaVersionKey core.CollectionSchemaVersionKey, + fieldName string, +) baseCRDT { return baseCRDT{ - store: store, - key: key, + store: store, + key: key, + schemaVersionKey: schemaVersionKey, + fieldName: fieldName, } } diff --git a/core/crdt/base_test.go b/core/crdt/base_test.go index 5fd7d9248e..e69d69f05e 100644 --- a/core/crdt/base_test.go +++ b/core/crdt/base_test.go @@ -29,11 +29,11 @@ func newSeededDS() datastore.DSReaderWriter { } func exampleBaseCRDT() baseCRDT { - return newBaseCRDT(newSeededDS(), core.DataStoreKey{}) + return newBaseCRDT(newSeededDS(), core.DataStoreKey{}, core.CollectionSchemaVersionKey{}, "") } func TestBaseCRDTNew(t *testing.T) { - base := newBaseCRDT(newDS(), core.DataStoreKey{}) + base := newBaseCRDT(newDS(), core.DataStoreKey{}, core.CollectionSchemaVersionKey{}, "") if base.store == nil { t.Error("newBaseCRDT needs to init store") } diff --git a/core/crdt/composite.go b/core/crdt/composite.go index 68f7824329..761cc07828 100644 --- a/core/crdt/composite.go +++ b/core/crdt/composite.go @@ -29,11 +29,6 @@ import ( "github.com/sourcenetwork/defradb/errors" ) -var ( - _ core.ReplicatedData = (*CompositeDAG)(nil) - _ core.CompositeDelta = (*CompositeDAGDelta)(nil) -) - // CompositeDAGDelta represents a delta-state update made of sub-MerkleCRDTs. type CompositeDAGDelta struct { // SchemaVersionID is the schema version datastore key at the time of commit. @@ -51,6 +46,8 @@ type CompositeDAGDelta struct { FieldName string } +var _ core.CompositeDelta = (*CompositeDAGDelta)(nil) + // GetPriority gets the current priority for this delta. func (delta *CompositeDAGDelta) GetPriority() uint64 { return delta.Priority @@ -92,39 +89,21 @@ func (delta *CompositeDAGDelta) Links() []core.DAGLink { // CompositeDAG is a CRDT structure that is used to track a collection of sub MerkleCRDTs. type CompositeDAG struct { - store datastore.DSReaderWriter - key core.DataStoreKey - // schemaVersionKey is the schema version datastore key at the time of commit. - // - // It can be used to identify the collection datastructure state at time of commit. - schemaVersionKey core.CollectionSchemaVersionKey - - fieldName string + baseCRDT } -var _ core.ReplicatedData = CompositeDAG{} +var _ core.ReplicatedData = (*CompositeDAG)(nil) func NewCompositeDAG( store datastore.DSReaderWriter, schemaVersionKey core.CollectionSchemaVersionKey, - namespace core.Key, key core.DataStoreKey, fieldName string, ) CompositeDAG { - return CompositeDAG{ - store: store, - key: key, - schemaVersionKey: schemaVersionKey, - fieldName: fieldName, - } -} - -// ID returns the schema ID of the composite DAG CRDT. -func (c CompositeDAG) ID() string { - return c.key.ToString() + return CompositeDAG{newBaseCRDT(store, key, schemaVersionKey, fieldName)} } -// Value returns the schema ID of the composite DAG CRDT. +// Value is a no-op for a CompositeDAG. func (c CompositeDAG) Value(ctx context.Context) ([]byte, error) { return nil, nil } @@ -226,7 +205,7 @@ func (c CompositeDAG) deleteWithPrefix(ctx context.Context, key core.DataStoreKe } // DeltaDecode is a typed helper to extract. 
-// a LWWRegDelta from a ipld.Node +// a CompositeDAGDelta from a ipld.Node // for now let's do cbor (quick to implement) func (c CompositeDAG) DeltaDecode(node ipld.Node) (core.Delta, error) { delta := &CompositeDAGDelta{} diff --git a/core/crdt/lwwreg.go b/core/crdt/lwwreg.go index 60df739319..18979c1bfb 100644 --- a/core/crdt/lwwreg.go +++ b/core/crdt/lwwreg.go @@ -26,12 +26,6 @@ import ( "github.com/sourcenetwork/defradb/errors" ) -var ( - // ensure types implements core interfaces - _ core.ReplicatedData = (*LWWRegister)(nil) - _ core.Delta = (*LWWRegDelta)(nil) -) - // LWWRegDelta is a single delta operation for an LWWRegister // @todo: Expand delta metadata (investigate if needed) type LWWRegDelta struct { @@ -42,6 +36,8 @@ type LWWRegDelta struct { FieldName string } +var _ core.Delta = (*LWWRegDelta)(nil) + // GetPriority gets the current priority for this delta. func (delta *LWWRegDelta) GetPriority() uint64 { return delta.Priority @@ -79,15 +75,10 @@ func (delta *LWWRegDelta) Value() any { // of an arbitrary data type that ensures convergence. type LWWRegister struct { baseCRDT - - // schemaVersionKey is the schema version datastore key at the time of commit. - // - // It can be used to identify the collection datastructure state at time of commit. - schemaVersionKey core.CollectionSchemaVersionKey - - fieldName string } +var _ core.ReplicatedData = (*LWWRegister)(nil) + // NewLWWRegister returns a new instance of the LWWReg with the given ID. func NewLWWRegister( store datastore.DSReaderWriter, @@ -95,15 +86,7 @@ func NewLWWRegister( key core.DataStoreKey, fieldName string, ) LWWRegister { - return LWWRegister{ - baseCRDT: newBaseCRDT(store, key), - schemaVersionKey: schemaVersionKey, - fieldName: fieldName, - // id: id, - // data: data, - // ts: ts, - // clock: clock, - } + return LWWRegister{newBaseCRDT(store, key, schemaVersionKey, fieldName)} } // Value gets the current register value @@ -120,7 +103,6 @@ func (reg LWWRegister) Value(ctx context.Context) ([]byte, error) { // Set generates a new delta with the supplied value // RETURN DELTA func (reg LWWRegister) Set(value []byte) *LWWRegDelta { - // return NewLWWRegister(reg.id, value, reg.clock.Apply(), reg.clock) return &LWWRegDelta{ Data: value, DocKey: []byte(reg.key.DocKey), @@ -129,18 +111,6 @@ func (reg LWWRegister) Set(value []byte) *LWWRegDelta { } } -func (reg LWWRegister) ID() string { - return reg.key.ToString() -} - -// RETURN DELTA -// func (reg LWWRegister) setWithClock(value []byte, clock Clock) LWWRegDelta { -// // return NewLWWRegister(reg.id, value, clock.Apply(), clock) -// return LWWRegDelta{ -// data: value, -// } -// } - // Merge implements ReplicatedData interface // Merge two LWWRegisty based on the order of the timestamp (ts), // if they are equal, compare IDs diff --git a/core/replicated.go b/core/replicated.go index 86d0523e42..75a72ece7f 100644 --- a/core/replicated.go +++ b/core/replicated.go @@ -20,7 +20,6 @@ import ( // ReplicatedData is a data type that allows concurrent writers to deterministically merge other // replicated data so as to converge on the same state. 
type ReplicatedData interface { - ID() string Merge(ctx context.Context, other Delta) error DeltaDecode(node ipld.Node) (Delta, error) // possibly rename to just Decode Value(ctx context.Context) ([]byte, error) @@ -31,8 +30,3 @@ type PersistedReplicatedData interface { ReplicatedData Publish(Delta) (cid.Cid, error) } - -// type EmbedableReplicatedData interface { -// ReplicatedData -// Apply(Operation) error -// } diff --git a/db/collection.go b/db/collection.go index b4586be89b..65b0fbaa22 100644 --- a/db/collection.go +++ b/db/collection.go @@ -34,7 +34,7 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/lens" - "github.com/sourcenetwork/defradb/merkle/crdt" + merklecrdt "github.com/sourcenetwork/defradb/merkle/crdt" ) var _ client.Collection = (*collection)(nil) @@ -973,7 +973,7 @@ func (c *collection) save( return cid.Undef, err } - node, _, err := c.saveDocValue(ctx, txn, fieldKey, val) + node, _, err := c.saveFieldToMerkleCRDT(ctx, txn, fieldKey, val) if err != nil { return cid.Undef, err } @@ -1000,11 +1000,10 @@ func (c *collection) save( return cid.Undef, nil } - headNode, priority, err := c.saveValueToMerkleCRDT( + headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, txn, primaryKey.ToDataStoreKey(), - client.COMPOSITE, buf, links, client.Active, @@ -1179,7 +1178,7 @@ func (c *collection) exists( return true, false, nil } -func (c *collection) saveDocValue( +func (c *collection) saveFieldToMerkleCRDT( ctx context.Context, txn datastore.Txn, key core.DataStoreKey, @@ -1201,20 +1200,7 @@ func (c *collection) saveDocValue( return nil, 0, err } } - return c.saveValueToMerkleCRDT(ctx, txn, key, client.LWW_REGISTER, bytes) - default: - return nil, 0, ErrUnknownCRDT - } -} -func (c *collection) saveValueToMerkleCRDT( - ctx context.Context, - txn datastore.Txn, - key core.DataStoreKey, - ctype client.CType, - args ...any) (ipld.Node, uint64, error) { - switch ctype { - case client.LWW_REGISTER: fieldID, err := strconv.Atoi(key.FieldId) if err != nil { return nil, 0, err @@ -1227,68 +1213,40 @@ func (c *collection) saveValueToMerkleCRDT( return nil, 0, client.NewErrFieldIndexNotExist(fieldID) } - merkleCRDT, err := c.db.crdtFactory.InstanceWithStores( + merkleCRDT := merklecrdt.NewMerkleLWWRegister( txn, core.NewCollectionSchemaVersionKey(schema.VersionID, c.ID()), - c.db.events.Updates, - ctype, key, field.Name, ) - if err != nil { - return nil, 0, err - } - var bytes []byte - // parse args - if len(args) != 1 { - return nil, 0, ErrUnknownCRDTArgument - } - bytes, ok = args[0].([]byte) - if !ok { - return nil, 0, ErrUnknownCRDTArgument - } - lwwreg := merkleCRDT.(*crdt.MerkleLWWRegister) - return lwwreg.Set(ctx, bytes) - case client.COMPOSITE: - key = key.WithFieldId(core.COMPOSITE_NAMESPACE) - merkleCRDT, err := c.db.crdtFactory.InstanceWithStores( - txn, - core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), - c.db.events.Updates, - ctype, - key, - "", - ) - if err != nil { - return nil, 0, err - } + return merkleCRDT.Set(ctx, bytes) + default: + return nil, 0, client.NewErrUnknownCRDT(val.Type()) + } +} - // parse args - if len(args) < 2 { - return nil, 0, ErrUnknownCRDTArgument - } - bytes, ok := args[0].([]byte) - if !ok { - return nil, 0, ErrUnknownCRDTArgument - } - links, ok := args[1].([]core.DAGLink) - if !ok { - return nil, 0, ErrUnknownCRDTArgument - } - comp := merkleCRDT.(*crdt.MerkleCompositeDAG) - if len(args) > 2 { - status, ok := args[2].(client.DocumentStatus) - if !ok 
{ - return nil, 0, ErrUnknownCRDTArgument - } - if status.IsDeleted() { - return comp.Delete(ctx, links) - } - } - return comp.Set(ctx, bytes, links) +func (c *collection) saveCompositeToMerkleCRDT( + ctx context.Context, + txn datastore.Txn, + key core.DataStoreKey, + buf []byte, + links []core.DAGLink, + status client.DocumentStatus, +) (ipld.Node, uint64, error) { + key = key.WithFieldId(core.COMPOSITE_NAMESPACE) + merkleCRDT := merklecrdt.NewMerkleCompositeDAG( + txn, + core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), + key, + "", + ) + + if status.IsDeleted() { + return merkleCRDT.Delete(ctx, links) } - return nil, 0, ErrUnknownCRDT + + return merkleCRDT.Set(ctx, buf, links) } // getTxn gets or creates a new transaction from the underlying db. diff --git a/db/collection_delete.go b/db/collection_delete.go index 7f6a968a97..afa7d64a92 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -261,11 +261,10 @@ func (c *collection) applyDelete( } } - headNode, priority, err := c.saveValueToMerkleCRDT( + headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, txn, dsKey, - client.COMPOSITE, []byte{}, dagLinks, client.Deleted, diff --git a/db/db.go b/db/db.go index f2f59ecdaf..1046b2db54 100644 --- a/db/db.go +++ b/db/db.go @@ -31,7 +31,6 @@ import ( "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/lens" "github.com/sourcenetwork/defradb/logging" - "github.com/sourcenetwork/defradb/merkle/crdt" "github.com/sourcenetwork/defradb/request/graphql" ) @@ -56,8 +55,6 @@ type db struct { rootstore datastore.RootStore multistore datastore.MultiStore - crdtFactory *crdt.Factory - events events.Events parser core.Parser @@ -114,7 +111,6 @@ func NewDB(ctx context.Context, rootstore datastore.RootStore, options ...Option func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option) (*implicitTxnDB, error) { log.Debug(ctx, "Loading: internal datastores") multistore := datastore.MultiStoreFrom(rootstore) - crdtFactory := crdt.DefaultFactory.WithStores(multistore) parser, err := graphql.NewParser() if err != nil { @@ -125,8 +121,6 @@ func newDB(ctx context.Context, rootstore datastore.RootStore, options ...Option rootstore: rootstore, multistore: multistore, - crdtFactory: &crdtFactory, - parser: parser, options: options, } diff --git a/db/errors.go b/db/errors.go index 651bcbe42b..17e82c6738 100644 --- a/db/errors.go +++ b/db/errors.go @@ -111,7 +111,6 @@ var ( ErrDocumentAlreadyExists = errors.New(errDocumentAlreadyExists) ErrDocumentDeleted = errors.New(errDocumentDeleted) ErrUnknownCRDTArgument = errors.New("invalid CRDT arguments") - ErrUnknownCRDT = errors.New("unknown crdt") ErrCollectionAlreadyExists = errors.New("collection already exists") ErrCollectionNameEmpty = errors.New("collection name can't be empty") ErrSchemaNameEmpty = errors.New("schema name can't be empty") diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go index 4ab8ef54a7..454bcf17c6 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -25,8 +25,7 @@ import ( "github.com/sourcenetwork/defradb/datastore/memory" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/events" - "github.com/sourcenetwork/defradb/merkle/crdt" + merklecrdt "github.com/sourcenetwork/defradb/merkle/crdt" "github.com/sourcenetwork/defradb/planner/mapper" ) @@ -94,7 +93,7 @@ type VersionedFetcher struct { col client.Collection // @todo index *client.IndexDescription - mCRDTs 
map[uint32]crdt.MerkleCRDT + mCRDTs map[uint32]merklecrdt.MerkleCRDT } // Init initializes the VersionedFetcher. @@ -110,7 +109,7 @@ func (vf *VersionedFetcher) Init( ) error { vf.col = col vf.queuedCids = list.New() - vf.mCRDTs = make(map[uint32]crdt.MerkleCRDT) + vf.mCRDTs = make(map[uint32]merklecrdt.MerkleCRDT) vf.txn = txn // create store @@ -385,10 +384,9 @@ func (vf *VersionedFetcher) processNode( if err != nil { return err } - mcrdt, err = crdt.DefaultFactory.InstanceWithStores( + mcrdt, err = merklecrdt.InstanceWithStore( vf.store, core.CollectionSchemaVersionKey{}, - events.EmptyUpdateChannel, ctype, key, fieldName, diff --git a/merkle/crdt/composite.go b/merkle/crdt/composite.go index 704c65fcd0..f837ac3ef7 100644 --- a/merkle/crdt/composite.go +++ b/merkle/crdt/composite.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package crdt +package merklecrdt import ( "context" @@ -18,42 +18,9 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" corecrdt "github.com/sourcenetwork/defradb/core/crdt" - "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/merkle/clock" ) -var ( - compFactoryFn = MerkleCRDTFactory( - func( - mstore datastore.MultiStore, - schemaRoot core.CollectionSchemaVersionKey, - uCh events.UpdateChannel, - fieldName string, - ) MerkleCRDTInitFn { - return func(key core.DataStoreKey) MerkleCRDT { - return NewMerkleCompositeDAG( - mstore.Datastore(), - mstore.Headstore(), - mstore.DAGstore(), - schemaRoot, - uCh, - core.DataStoreKey{}, - key, - fieldName, - ) - } - }, - ) -) - -func init() { - err := DefaultFactory.Register(client.COMPOSITE, &compFactoryFn) - if err != nil { - panic(err) - } -} - // MerkleCompositeDAG is a MerkleCRDT implementation of the CompositeDAG using MerkleClocks. type MerkleCompositeDAG struct { *baseMerkleCRDT @@ -64,25 +31,20 @@ type MerkleCompositeDAG struct { // NewMerkleCompositeDAG creates a new instance (or loaded from DB) of a MerkleCRDT // backed by a CompositeDAG CRDT. 
func NewMerkleCompositeDAG( - datastore datastore.DSReaderWriter, - headstore datastore.DSReaderWriter, - dagstore datastore.DAGStore, + store Stores, schemaVersionKey core.CollectionSchemaVersionKey, - uCh events.UpdateChannel, - ns, key core.DataStoreKey, fieldName string, ) *MerkleCompositeDAG { compositeDag := corecrdt.NewCompositeDAG( - datastore, + store.Datastore(), schemaVersionKey, - ns, - key, /* stuff like namespace and ID */ + key, fieldName, ) - clock := clock.NewMerkleClock(headstore, dagstore, key.ToHeadStoreKey(), compositeDag) - base := &baseMerkleCRDT{clock: clock, crdt: compositeDag, updateChannel: uCh} + clock := clock.NewMerkleClock(store.Headstore(), store.DAGstore(), key.ToHeadStoreKey(), compositeDag) + base := &baseMerkleCRDT{clock: clock, crdt: compositeDag} return &MerkleCompositeDAG{ baseMerkleCRDT: base, @@ -100,7 +62,7 @@ func (m *MerkleCompositeDAG) Delete( log.Debug(ctx, "Applying delta-mutator 'Delete' on CompositeDAG") delta := m.reg.Set([]byte{}, links) delta.Status = client.Deleted - nd, err := m.Publish(ctx, delta) + nd, err := m.clock.AddDAGNode(ctx, delta) if err != nil { return nil, 0, err } @@ -118,21 +80,10 @@ func (m *MerkleCompositeDAG) Set( // persist/publish delta log.Debug(ctx, "Applying delta-mutator 'Set' on CompositeDAG") delta := m.reg.Set(patch, links) - nd, err := m.Publish(ctx, delta) + nd, err := m.clock.AddDAGNode(ctx, delta) if err != nil { return nil, 0, err } return nd, delta.GetPriority(), nil } - -// Value is a no-op for a CompositeDAG. -func (m *MerkleCompositeDAG) Value(ctx context.Context) ([]byte, error) { - return m.reg.Value(ctx) -} - -// Merge writes the provided delta to state using a supplied merge semantic. -// @todo -func (m *MerkleCompositeDAG) Merge(ctx context.Context, other core.Delta) error { - return m.reg.Merge(ctx, other) -} diff --git a/merkle/crdt/errors.go b/merkle/crdt/errors.go deleted file mode 100644 index e33ec97a12..0000000000 --- a/merkle/crdt/errors.go +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package crdt - -import ( - "github.com/sourcenetwork/defradb/errors" -) - -var ( - ErrFactoryTypeNoExist = errors.New("no such factory for the given type exists") -) diff --git a/merkle/crdt/factory.go b/merkle/crdt/factory.go deleted file mode 100644 index 04dc3d5aef..0000000000 --- a/merkle/crdt/factory.go +++ /dev/null @@ -1,162 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package crdt - -import ( - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" -) - -// MerkleCRDTInitFn instantiates a MerkleCRDT with a given key. -type MerkleCRDTInitFn func(core.DataStoreKey) MerkleCRDT - -// MerkleCRDTFactory instantiates a MerkleCRDTInitFn with a MultiStore. 
-// Returns a MerkleCRDTInitFn with all the necessary stores set. -type MerkleCRDTFactory func( - mstore datastore.MultiStore, - schemaVersionKey core.CollectionSchemaVersionKey, - uCh events.UpdateChannel, - fieldName string, -) MerkleCRDTInitFn - -// Factory is a helper utility for instantiating new MerkleCRDTs. -// It removes some of the overhead of having to coordinate all the various -// store parameters on every single new MerkleCRDT creation. -type Factory struct { - crdts map[client.CType]*MerkleCRDTFactory - multistore datastore.MultiStore -} - -var ( - // DefaultFactory is instantiated with no stores - // It is recommended to use this only after you call - // WithStores(...) so you get a new non-shared instance - DefaultFactory = NewFactory(nil) -) - -// NewFactory returns a newly instantiated factory object with the assigned stores. -// It may be called with all stores set to nil. -func NewFactory(multistore datastore.MultiStore) *Factory { - return &Factory{ - crdts: make(map[client.CType]*MerkleCRDTFactory), - multistore: multistore, - } -} - -// Register creates a new entry in the CRDTs map to register a factory function -// to a MerkleCRDT Type. -func (factory *Factory) Register(t client.CType, fn *MerkleCRDTFactory) error { - factory.crdts[t] = fn - return nil -} - -// Instance and execute the registered factory function for a given MerkleCRDT type -// supplied with all the current stores (passed in as a datastore.MultiStore object). -func (factory Factory) Instance( - schemaVersionKey core.CollectionSchemaVersionKey, - uCh events.UpdateChannel, - t client.CType, - key core.DataStoreKey, - fieldName string, -) (MerkleCRDT, error) { - // get the factory function for the given MerkleCRDT type - // and pass in the current factory state as a MultiStore parameter - fn, err := factory.getRegisteredFactory(t) - if err != nil { - return nil, err - } - return (*fn)(factory, schemaVersionKey, uCh, fieldName)(key), nil -} - -// InstanceWithStore executes the registered factory function for the given MerkleCRDT type -// with the additional supplied datastore.MultiStore instead of the saved one on the main Factory. -func (factory Factory) InstanceWithStores( - store datastore.MultiStore, - schemaVersionKey core.CollectionSchemaVersionKey, - uCh events.UpdateChannel, - t client.CType, - key core.DataStoreKey, - fieldName string, -) (MerkleCRDT, error) { - fn, err := factory.getRegisteredFactory(t) - if err != nil { - return nil, err - } - - return (*fn)(store, schemaVersionKey, uCh, fieldName)(key), nil -} - -func (factory Factory) getRegisteredFactory(t client.CType) (*MerkleCRDTFactory, error) { - fn, exists := factory.crdts[t] - if !exists { - return nil, ErrFactoryTypeNoExist - } - return fn, nil -} - -// SetStores sets all the current stores on the Factory in one call. -func (factory *Factory) SetStores(multistore datastore.MultiStore) error { - factory.multistore = multistore - return nil -} - -// WithStores returns a new instance of the Factory with all the stores set. -func (factory Factory) WithStores(multistore datastore.MultiStore) Factory { - factory.multistore = multistore - return factory -} - -// Rootstore implements MultiStore. -func (factory Factory) Rootstore() datastore.DSReaderWriter { - return nil -} - -// Data implements datastore.MultiStore and returns the current Datastore. 
-func (factory Factory) Datastore() datastore.DSReaderWriter { - if factory.multistore == nil { - return nil - } - return factory.multistore.Datastore() -} - -// Head implements datastore.MultiStore and returns the current Headstore. -func (factory Factory) Headstore() datastore.DSReaderWriter { - if factory.multistore == nil { - return nil - } - return factory.multistore.Headstore() -} - -// Peerstore implements datastore.MultiStore and returns the current Peerstore. -func (factory Factory) Peerstore() datastore.DSBatching { - if factory.multistore == nil { - return nil - } - return factory.multistore.Peerstore() -} - -// Head implements datastore.MultiStore and returns the current Headstore. -func (factory Factory) Systemstore() datastore.DSReaderWriter { - if factory.multistore == nil { - return nil - } - return factory.multistore.Systemstore() -} - -// DAGstore implements datastore.MultiStore and returns the current DAGstore. -func (factory Factory) DAGstore() datastore.DAGStore { - if factory.multistore == nil { - return nil - } - return factory.multistore.DAGstore() -} diff --git a/merkle/crdt/factory_test.go b/merkle/crdt/factory_test.go deleted file mode 100644 index 10e2f5c672..0000000000 --- a/merkle/crdt/factory_test.go +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright 2022 Democratized Data Foundation -// -// Use of this software is governed by the Business Source License -// included in the file licenses/BSL.txt. -// -// As of the Change Date specified in that file, in accordance with -// the Business Source License, use of this software will be governed -// by the Apache License, Version 2.0, included in the file -// licenses/APL.txt. - -package crdt - -import ( - "context" - "testing" - - ds "github.com/ipfs/go-datastore" - "github.com/stretchr/testify/assert" - - "github.com/sourcenetwork/defradb/client" - "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" -) - -func newStores() datastore.MultiStore { - root := ds.NewMapDatastore() - return datastore.MultiStoreFrom(root) -} - -func TestNewBlankFactory(t *testing.T) { - f := NewFactory(nil) - if f == nil { - t.Fatal("Returned factory is a nil pointer") - } -} - -func TestNewFactoryWithStores(t *testing.T) { - m := newStores() - f := NewFactory(m) - if f == nil { - t.Fatal("Returned factory is a nil pointer") - } - - assert.Equal(t, m.Datastore(), f.Datastore()) - assert.Equal(t, m.Headstore(), f.Headstore()) - assert.Equal(t, m.DAGstore(), f.DAGstore()) - assert.Equal(t, m.Systemstore(), f.Systemstore()) -} - -func TestFactoryMultiStoreInterface(t *testing.T) { - m := newStores() - f := NewFactory(m) - if f == nil { - t.Fatal("Returned factory is a nil pointer") - } - - // check interface implement - var _ datastore.MultiStore = f - // ms = f - - // check interface functions - assert.Equal(t, m.Datastore(), f.Datastore()) - assert.Equal(t, m.Headstore(), f.Headstore()) - assert.Equal(t, m.DAGstore(), f.DAGstore()) - assert.Equal(t, m.Systemstore(), f.Systemstore()) -} - -func TestFactorySetStores(t *testing.T) { - f := NewFactory(nil) - m := newStores() - err := f.SetStores(m) - assert.Nil(t, err) - - assert.Equal(t, m.Datastore(), f.Datastore()) - assert.Equal(t, m.Headstore(), f.Headstore()) - assert.Equal(t, m.DAGstore(), f.DAGstore()) - assert.Equal(t, m.Systemstore(), f.Systemstore()) -} - -func TestFactoryWithStores(t *testing.T) { - f := NewFactory(nil) - m := newStores() - f2 := f.WithStores(m) - // assert.NotEmpty - - assert.Nil(t, 
f.Datastore()) - assert.Nil(t, f.Headstore()) - assert.Nil(t, f.DAGstore()) - - assert.Equal(t, m.Datastore(), f2.Datastore()) - assert.Equal(t, m.Headstore(), f2.Headstore()) - assert.Equal(t, m.DAGstore(), f2.DAGstore()) - assert.Equal(t, m.Systemstore(), f2.Systemstore()) -} - -func TestFullFactoryRegister(t *testing.T) { - m := newStores() - f := NewFactory(m) - err := f.Register(client.LWW_REGISTER, &lwwFactoryFn) - assert.Nil(t, err) - assert.Equal(t, &lwwFactoryFn, f.crdts[client.LWW_REGISTER]) -} - -func TestBlankFactoryRegister(t *testing.T) { - f := NewFactory(nil) - err := f.Register(client.LWW_REGISTER, &lwwFactoryFn) - assert.Nil(t, err) - assert.Equal(t, &lwwFactoryFn, f.crdts[client.LWW_REGISTER]) -} - -func TestWithStoresFactoryRegister(t *testing.T) { - f := NewFactory(nil) - f.Register(client.LWW_REGISTER, &lwwFactoryFn) - m := newStores() - f2 := f.WithStores(m) - - assert.Equal(t, &lwwFactoryFn, f2.crdts[client.LWW_REGISTER]) -} - -func TestDefaultFactory(t *testing.T) { - assert.NotNil(t, DefaultFactory) - assert.Equal(t, &lwwFactoryFn, DefaultFactory.crdts[client.LWW_REGISTER]) -} - -func TestFactoryInstanceMissing(t *testing.T) { - m := newStores() - f := NewFactory(m) - - _, err := f.Instance( - core.CollectionSchemaVersionKey{}, - events.EmptyUpdateChannel, - client.LWW_REGISTER, - core.MustNewDataStoreKey("/1/0/MyKey"), - "", - ) - assert.Equal(t, err, ErrFactoryTypeNoExist) -} - -func TestBlankFactoryInstanceWithLWWRegister(t *testing.T) { - m := newStores() - f1 := NewFactory(nil) - f1.Register(client.LWW_REGISTER, &lwwFactoryFn) - f := f1.WithStores(m) - - crdt, err := f.Instance( - core.CollectionSchemaVersionKey{}, - events.EmptyUpdateChannel, - client.LWW_REGISTER, - core.MustNewDataStoreKey("/1/0/MyKey"), - "", - ) - assert.NoError(t, err) - - _, ok := crdt.(*MerkleLWWRegister) - assert.True(t, ok) -} - -func TestBlankFactoryInstanceWithCompositeRegister(t *testing.T) { - m := newStores() - f1 := NewFactory(nil) - f1.Register(client.COMPOSITE, &compFactoryFn) - f := f1.WithStores(m) - - crdt, err := f.Instance( - core.CollectionSchemaVersionKey{}, - events.EmptyUpdateChannel, - client.COMPOSITE, - core.MustNewDataStoreKey("/1/0/MyKey"), - "", - ) - assert.NoError(t, err) - - _, ok := crdt.(*MerkleCompositeDAG) - assert.True(t, ok) -} - -func TestFullFactoryInstanceLWWRegister(t *testing.T) { - m := newStores() - f := NewFactory(m) - f.Register(client.LWW_REGISTER, &lwwFactoryFn) - - crdt, err := f.Instance( - core.CollectionSchemaVersionKey{}, - events.EmptyUpdateChannel, - client.LWW_REGISTER, - core.MustNewDataStoreKey("/1/0/MyKey"), - "", - ) - assert.NoError(t, err) - - _, ok := crdt.(*MerkleLWWRegister) - assert.True(t, ok) -} - -func TestFullFactoryInstanceCompositeRegister(t *testing.T) { - m := newStores() - f := NewFactory(m) - f.Register(client.COMPOSITE, &compFactoryFn) - - crdt, err := f.Instance( - core.CollectionSchemaVersionKey{}, - events.EmptyUpdateChannel, - client.COMPOSITE, - core.MustNewDataStoreKey("/1/0/MyKey"), - "", - ) - assert.NoError(t, err) - - _, ok := crdt.(*MerkleCompositeDAG) - assert.True(t, ok) -} - -func TestLWWRegisterFactoryFn(t *testing.T) { - ctx := context.Background() - m := newStores() - f := NewFactory(m) // here factory is only needed to satisfy datastore.MultiStore interface - crdt := lwwFactoryFn(f, core.CollectionSchemaVersionKey{}, events.EmptyUpdateChannel, "")(core.MustNewDataStoreKey("/1/0/MyKey")) - - lwwreg, ok := crdt.(*MerkleLWWRegister) - assert.True(t, ok) - - _, _, err := lwwreg.Set(ctx, 
[]byte("hi")) - assert.NoError(t, err) -} - -func TestCompositeRegisterFactoryFn(t *testing.T) { - ctx := context.Background() - m := newStores() - f := NewFactory(m) // here factory is only needed to satisfy datastore.MultiStore interface - crdt := compFactoryFn(f, core.CollectionSchemaVersionKey{}, events.EmptyUpdateChannel, "")(core.MustNewDataStoreKey("/1/0/MyKey")) - - merkleReg, ok := crdt.(*MerkleCompositeDAG) - assert.True(t, ok) - - _, _, err := merkleReg.Set(ctx, []byte("hi"), []core.DAGLink{}) - assert.NoError(t, err) -} diff --git a/merkle/crdt/lwwreg.go b/merkle/crdt/lwwreg.go index 796451c041..8b47492b26 100644 --- a/merkle/crdt/lwwreg.go +++ b/merkle/crdt/lwwreg.go @@ -8,55 +8,21 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package crdt +package merklecrdt import ( "context" ipld "github.com/ipfs/go-ipld-format" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" corecrdt "github.com/sourcenetwork/defradb/core/crdt" - "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/merkle/clock" ) -var ( - lwwFactoryFn = MerkleCRDTFactory( - func( - mstore datastore.MultiStore, - schemaRoot core.CollectionSchemaVersionKey, - _ events.UpdateChannel, - fieldName string, - ) MerkleCRDTInitFn { - return func(key core.DataStoreKey) MerkleCRDT { - return NewMerkleLWWRegister( - mstore.Datastore(), - mstore.Headstore(), - mstore.DAGstore(), - schemaRoot, - core.DataStoreKey{}, - key, - fieldName, - ) - } - }, - ) -) - -func init() { - err := DefaultFactory.Register(client.LWW_REGISTER, &lwwFactoryFn) - if err != nil { - panic(err) - } -} - // MerkleLWWRegister is a MerkleCRDT implementation of the LWWRegister using MerkleClocks. type MerkleLWWRegister struct { *baseMerkleCRDT - // core.ReplicatedData reg corecrdt.LWWRegister } @@ -64,20 +30,14 @@ type MerkleLWWRegister struct { // NewMerkleLWWRegister creates a new instance (or loaded from DB) of a MerkleCRDT // backed by a LWWRegister CRDT. func NewMerkleLWWRegister( - datastore datastore.DSReaderWriter, - headstore datastore.DSReaderWriter, - dagstore datastore.DAGStore, + store Stores, schemaVersionKey core.CollectionSchemaVersionKey, - ns, key core.DataStoreKey, + key core.DataStoreKey, fieldName string, ) *MerkleLWWRegister { - register := corecrdt.NewLWWRegister(datastore, schemaVersionKey, key, fieldName /* stuff like namespace and ID */) - clk := clock.NewMerkleClock(headstore, dagstore, key.ToHeadStoreKey(), register) - - // newBaseMerkleCRDT(clock, register) + register := corecrdt.NewLWWRegister(store.Datastore(), schemaVersionKey, key, fieldName) + clk := clock.NewMerkleClock(store.Headstore(), store.DAGstore(), key.ToHeadStoreKey(), register) base := &baseMerkleCRDT{clock: clk, crdt: register} - // instantiate MerkleLWWRegister - // return return &MerkleLWWRegister{ baseMerkleCRDT: base, reg: register, @@ -89,17 +49,6 @@ func (mlwwreg *MerkleLWWRegister) Set(ctx context.Context, value []byte) (ipld.N // Set() call on underlying LWWRegister CRDT // persist/publish delta delta := mlwwreg.reg.Set(value) - nd, err := mlwwreg.Publish(ctx, delta) + nd, err := mlwwreg.clock.AddDAGNode(ctx, delta) return nd, delta.GetPriority(), err } - -// Value will retrieve the current value from the db. -func (mlwwreg *MerkleLWWRegister) Value(ctx context.Context) ([]byte, error) { - return mlwwreg.reg.Value(ctx) -} - -// Merge writes the provided delta to state using a supplied -// merge semantic. 
-func (mlwwreg *MerkleLWWRegister) Merge(ctx context.Context, other core.Delta) error { - return mlwwreg.reg.Merge(ctx, other) -} diff --git a/merkle/crdt/merklecrdt.go b/merkle/crdt/merklecrdt.go index 89e8d0eb2e..07fb83e436 100644 --- a/merkle/crdt/merklecrdt.go +++ b/merkle/crdt/merklecrdt.go @@ -11,15 +11,16 @@ /* Package crdt provides CRDT implementations leveraging MerkleClock. */ -package crdt +package merklecrdt import ( "context" ipld "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" - "github.com/sourcenetwork/defradb/events" + "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/logging" ) @@ -27,6 +28,12 @@ var ( log = logging.MustNewLogger("merklecrdt") ) +type Stores interface { + Datastore() datastore.DSReaderWriter + DAGstore() datastore.DAGStore + Headstore() datastore.DSReaderWriter +} + // MerkleCRDT is the implementation of a Merkle Clock along with a // CRDT payload. It implements the ReplicatedData interface // so it can be merged with any given semantics. @@ -35,18 +42,13 @@ type MerkleCRDT interface { Clock() core.MerkleClock } -var ( - // defaultMerkleCRDTs = make(map[Type]MerkleCRDTFactory) - _ core.ReplicatedData = (*baseMerkleCRDT)(nil) -) +var _ core.ReplicatedData = (*baseMerkleCRDT)(nil) // baseMerkleCRDT handles the MerkleCRDT overhead functions that aren't CRDT specific like the mutations and state // retrieval functions. It handles creating and publishing the CRDT DAG with the help of the MerkleClock. type baseMerkleCRDT struct { clock core.MerkleClock crdt core.ReplicatedData - - updateChannel events.UpdateChannel } func (base *baseMerkleCRDT) Clock() core.MerkleClock { @@ -65,19 +67,28 @@ func (base *baseMerkleCRDT) Value(ctx context.Context) ([]byte, error) { return base.crdt.Value(ctx) } -func (base *baseMerkleCRDT) ID() string { - return base.crdt.ID() -} - -// Publishes the delta to state. -func (base *baseMerkleCRDT) Publish( - ctx context.Context, - delta core.Delta, -) (ipld.Node, error) { - log.Debug(ctx, "Processing CRDT state", logging.NewKV("DocKey", base.crdt.ID())) - nd, err := base.clock.AddDAGNode(ctx, delta) - if err != nil { - return nil, err +func InstanceWithStore( + store Stores, + schemaVersionKey core.CollectionSchemaVersionKey, + ctype client.CType, + key core.DataStoreKey, + fieldName string, +) (MerkleCRDT, error) { + switch ctype { + case client.LWW_REGISTER: + return NewMerkleLWWRegister( + store, + schemaVersionKey, + key, + fieldName, + ), nil + case client.COMPOSITE: + return NewMerkleCompositeDAG( + store, + schemaVersionKey, + key, + fieldName, + ), nil } - return nd, nil + return nil, client.NewErrUnknownCRDT(ctype) } diff --git a/merkle/crdt/merklecrdt_test.go b/merkle/crdt/merklecrdt_test.go index 675fcfe38f..47537add09 100644 --- a/merkle/crdt/merklecrdt_test.go +++ b/merkle/crdt/merklecrdt_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. 
-package crdt +package merklecrdt import ( "context" @@ -45,7 +45,7 @@ func TestMerkleCRDTPublish(t *testing.T) { Data: []byte("test"), } - nd, err := bCRDT.Publish(ctx, delta) + nd, err := bCRDT.clock.AddDAGNode(ctx, delta) if err != nil { t.Error("Failed to publish delta to MerkleCRDT:", err) return diff --git a/net/process.go b/net/process.go index 85748090ff..3d776cc1c1 100644 --- a/net/process.go +++ b/net/process.go @@ -28,9 +28,8 @@ import ( "github.com/sourcenetwork/defradb/datastore" "github.com/sourcenetwork/defradb/db/base" "github.com/sourcenetwork/defradb/errors" - "github.com/sourcenetwork/defradb/events" "github.com/sourcenetwork/defradb/logging" - "github.com/sourcenetwork/defradb/merkle/crdt" + merklecrdt "github.com/sourcenetwork/defradb/merkle/crdt" ) type blockProcessor struct { @@ -123,11 +122,11 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field func initCRDTForType( ctx context.Context, - txn datastore.MultiStore, + txn datastore.Txn, col client.Collection, dsKey core.DataStoreKey, field string, -) (crdt.MerkleCRDT, error) { +) (merklecrdt.MerkleCRDT, error) { var key core.DataStoreKey var ctype client.CType description := col.Description() @@ -140,24 +139,31 @@ func initCRDTForType( ).WithFieldId( core.COMPOSITE_NAMESPACE, ) - } else { - fd, ok := col.Schema().GetField(field) - if !ok { - return nil, errors.New(fmt.Sprintf("Couldn't find field %s for doc %s", field, dsKey)) - } - ctype = fd.Typ - fieldID := fd.ID.String() - key = base.MakeCollectionKey(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) + + log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) + return merklecrdt.NewMerkleCompositeDAG( + txn, + core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), + key, + field, + ), nil } + + fd, ok := col.Schema().GetField(field) + if !ok { + return nil, errors.New(fmt.Sprintf("Couldn't find field %s for doc %s", field, dsKey)) + } + ctype = fd.Typ + fieldID := fd.ID.String() + key = base.MakeCollectionKey(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) + log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) - return crdt.DefaultFactory.InstanceWithStores( + return merklecrdt.NewMerkleLWWRegister( txn, core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), - events.EmptyUpdateChannel, - ctype, key, field, - ) + ), nil } func decodeBlockBuffer(buf []byte, cid cid.Cid) (ipld.Node, error) { From 81fea6e7a8c5a1e5216bdb25d4bbe6e21e91d7f1 Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Fri, 8 Dec 2023 15:18:46 +0100 Subject: [PATCH 20/60] feat: New cmd for docs auto generation (#2096) ## Relevant issue(s) Resolves #2095 ## Description This change adds a new command for generating documents automatically. It also includes fixes for some overlooked edge cases. 
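For example, with User and Device collections already defined, a demand map is passed to the new command (this invocation is taken from the command's help text below):

    gendocs --demand '{"User": 100, "Device": 500 }'

The generator can also be driven from Go. A minimal sketch using the `tests/gen` package from this PR (the SDL `schema` string is elided here; see `gen_auto_test.go` below for complete setups):

    // Generate documents for every collection in the schema,
    // honoring the per-type demand options.
    docs, err := gen.AutoGenerateFromSDL(schema,
        gen.WithTypeDemand("User", 100),
        gen.WithTypeDemand("Device", 500),
    )
    if err != nil {
        return err
    }
    // Each gen.GeneratedDoc records its target collection, so callers
    // can group the results and insert them with Collection.CreateMany,
    // which is what gendocs does in batches of 1000.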
---
 cmd/gendocs/main.go                |  34 ++++++
 tests/gen/cli/errors.go            |  21 ++++
 tests/gen/cli/gendocs.go           | 159 +++++++++++++++++++++++++++++
 tests/gen/cli/gendocs_test.go      | 100 ++++++++++++++++++
 tests/gen/cli/util_test.go         | 107 +++++++++++++++++++
 tests/gen/gen_auto_configurator.go | 108 ++++++++++++--------
 tests/gen/gen_auto_option.go       |   2 +-
 tests/gen/gen_auto_test.go         |  92 ++++++++++++++++-
 8 files changed, 576 insertions(+), 47 deletions(-)
 create mode 100644 cmd/gendocs/main.go
 create mode 100644 tests/gen/cli/errors.go
 create mode 100644 tests/gen/cli/gendocs.go
 create mode 100644 tests/gen/cli/gendocs_test.go
 create mode 100644 tests/gen/cli/util_test.go

diff --git a/cmd/gendocs/main.go b/cmd/gendocs/main.go
new file mode 100644
index 0000000000..44901b0faf
--- /dev/null
+++ b/cmd/gendocs/main.go
@@ -0,0 +1,34 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+/*
+gendocs is a tool to generate the collections' documents automatically.
+*/
+package main
+
+import (
+	"os"
+
+	"github.com/sourcenetwork/defradb/config"
+	"github.com/sourcenetwork/defradb/tests/gen/cli"
+)
+
+func main() {
+	conf := config.DefaultConfig()
+	gendocsCmd := cli.MakeGenDocCommand(conf)
+	if err := gendocsCmd.Execute(); err != nil {
+		// this error is okay to discard because cobra
+		// logs any errors encountered during execution
+		//
+		// exiting with a non-zero status code signals
+		// that an error has occurred during execution
+		os.Exit(1)
+	}
+}
diff --git a/tests/gen/cli/errors.go b/tests/gen/cli/errors.go
new file mode 100644
index 0000000000..0ebb8bda36
--- /dev/null
+++ b/tests/gen/cli/errors.go
@@ -0,0 +1,21 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import "github.com/sourcenetwork/defradb/errors"
+
+const (
+	errInvalidDemandValue string = "invalid demand value"
+)
+
+func NewErrInvalidDemandValue(inner error) error {
+	return errors.Wrap(errInvalidDemandValue, inner)
+}
diff --git a/tests/gen/cli/gendocs.go b/tests/gen/cli/gendocs.go
new file mode 100644
index 0000000000..6d388eaf67
--- /dev/null
+++ b/tests/gen/cli/gendocs.go
@@ -0,0 +1,159 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package cli
+
+import (
+	"context"
+	"encoding/json"
+	"io"
+	"strconv"
+	"strings"
+
+	"github.com/spf13/cobra"
+
+	"github.com/sourcenetwork/defradb/client"
+	"github.com/sourcenetwork/defradb/config"
+	"github.com/sourcenetwork/defradb/http"
+	"github.com/sourcenetwork/defradb/tests/gen"
+)
+
+const defaultBatchSize = 1000
+
+func MakeGenDocCommand(cfg *config.Config) *cobra.Command {
+	var demandJSON string
+
+	var cmd = &cobra.Command{
+		Use:   "gendocs --demand <demand_json>",
+		Short: "Automatically generates documents for existing collections.",
+		Long: `Automatically generates documents for existing collections.
+
+Example: The following command generates 100 User documents and 500 Device documents:
+  gendocs --demand '{"User": 100, "Device": 500 }'`,
+		ValidArgs: []string{"demand"},
+		RunE: func(cmd *cobra.Command, args []string) error {
+			// cobra does not chain pre run calls so we have to run them again here
+			if err := loadConfig(cfg); err != nil {
+				return err
+			}
+			store, err := http.NewClient(cfg.API.Address)
+			if err != nil {
+				return err
+			}
+
+			demandMap := make(map[string]int)
+			err = json.Unmarshal([]byte(demandJSON), &demandMap)
+			if err != nil {
+				return NewErrInvalidDemandValue(err)
+			}
+
+			collections, err := store.GetAllCollections(cmd.Context())
+			if err != nil {
+				return err
+			}
+
+			opts := []gen.Option{}
+			for colName, numDocs := range demandMap {
+				opts = append(opts, gen.WithTypeDemand(colName, numDocs))
+			}
+			docs, err := gen.AutoGenerate(colsToDefs(collections), opts...)
+			if err != nil {
+				return err
+			}
+
+			out := cmd.OutOrStdout()
+			_, err = out.Write([]byte("Generated " + strconv.Itoa(len(docs)) +
+				" documents. Adding to collections...\n"))
+			if err != nil {
+				return err
+			}
+
+			batchOffset := 0
+			for batchOffset < len(docs) {
+				batchLen := defaultBatchSize
+				if batchOffset+batchLen > len(docs) {
+					batchLen = len(docs) - batchOffset
+				}
+
+				colDocsMap := groupDocsByCollection(docs[batchOffset : batchOffset+batchLen])
+
+				err = saveBatchToCollections(context.Background(), collections, colDocsMap)
+				if err != nil {
+					return err
+				}
+
+				err = reportSavedBatch(out, batchLen, colDocsMap)
+				if err != nil {
+					return err
+				}
+
+				batchOffset += batchLen
+			}
+
+			return nil
+		},
+	}
+	cmd.Flags().StringVarP(&demandJSON, "demand", "d", "", "Documents' demand in JSON format")
+
+	return cmd
+}
+
+func reportSavedBatch(out io.Writer, thisBatch int, colDocsMap map[string][]*client.Document) error {
+	reports := make([]string, 0, len(colDocsMap))
+	for colName, colDocs := range colDocsMap {
+		reports = append(reports, strconv.Itoa(len(colDocs))+" "+colName)
+	}
+
+	r := strings.Join(reports, ", ")
+	_, err := out.Write([]byte("Added " + strconv.Itoa(thisBatch) + " documents: " + r + "\n"))
+	return err
+}
+
+func saveBatchToCollections(
+	ctx context.Context,
+	collections []client.Collection,
+	colDocsMap map[string][]*client.Document,
+) error {
+	for colName, colDocs := range colDocsMap {
+		for _, col := range collections {
+			if col.Description().Name == colName {
+				err := col.CreateMany(context.Background(), colDocs)
+				if err != nil {
+					return err
+				}
+				break
+			}
+		}
+	}
+	return nil
+}
+
+func groupDocsByCollection(docs []gen.GeneratedDoc) map[string][]*client.Document {
+	result := make(map[string][]*client.Document)
+	for _, doc := range docs {
+		result[doc.Col.Description.Name] = append(result[doc.Col.Description.Name], doc.Doc)
+	}
+	return result
+}
+
+func colsToDefs(cols []client.Collection) []client.CollectionDefinition {
+	var colDefs []client.CollectionDefinition
+ for _, col := range cols { + colDefs = append(colDefs, col.Definition()) + } + return colDefs +} + +func loadConfig(cfg *config.Config) error { + if err := cfg.LoadRootDirFromFlagOrDefault(); err != nil { + return err + } + return cfg.LoadWithRootdir(cfg.ConfigFileExists()) +} diff --git a/tests/gen/cli/gendocs_test.go b/tests/gen/cli/gendocs_test.go new file mode 100644 index 0000000000..18b9b157c1 --- /dev/null +++ b/tests/gen/cli/gendocs_test.go @@ -0,0 +1,100 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "bytes" + "io" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/sourcenetwork/defradb/cli" + "github.com/sourcenetwork/defradb/config" + "github.com/sourcenetwork/defradb/tests/gen" +) + +func execAddSchemaCmd(t *testing.T, cfg *config.Config, schema string) { + rootCmd := cli.NewDefraCommand(cfg) + rootCmd.SetArgs([]string{"client", "schema", "add", schema}) + err := rootCmd.Execute() + require.NoError(t, err) +} + +func TestGendocsCmd_IfNoErrors_ReturnGenerationOutput(t *testing.T) { + cfg, _, close := startTestNode(t) + defer close() + + execAddSchemaCmd(t, cfg, ` + type User { + name: String + devices: [Device] + } + type Device { + model: String + owner: User + }`) + + genDocsCmd := MakeGenDocCommand(cfg) + outputBuf := bytes.NewBufferString("") + genDocsCmd.SetOut(outputBuf) + + genDocsCmd.SetArgs([]string{"--demand", `{"User": 3, "Device": 12}`}) + + err := genDocsCmd.Execute() + require.NoError(t, err) + + out, err := io.ReadAll(outputBuf) + require.NoError(t, err) + + outStr := string(out) + require.NoError(t, err) + + assert.Contains(t, outStr, "15") + assert.Contains(t, outStr, "3") + assert.Contains(t, outStr, "12") + assert.Contains(t, outStr, "User") + assert.Contains(t, outStr, "Device") +} + +func TestGendocsCmd_IfInvalidDemandValue_ReturnError(t *testing.T) { + cfg, _, close := startTestNode(t) + defer close() + + execAddSchemaCmd(t, cfg, ` + type User { + name: String + }`) + + genDocsCmd := MakeGenDocCommand(cfg) + genDocsCmd.SetArgs([]string{"--demand", `{"User": invalid}`}) + + err := genDocsCmd.Execute() + require.ErrorContains(t, err, errInvalidDemandValue) +} + +func TestGendocsCmd_IfInvalidConfig_ReturnError(t *testing.T) { + cfg, _, close := startTestNode(t) + defer close() + + execAddSchemaCmd(t, cfg, ` + type User { + name: String + }`) + + genDocsCmd := MakeGenDocCommand(cfg) + + genDocsCmd.SetArgs([]string{"--demand", `{"Unknown": 3}`}) + + err := genDocsCmd.Execute() + require.Error(t, err, gen.NewErrInvalidConfiguration("")) +} diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go new file mode 100644 index 0000000000..2e93f7b146 --- /dev/null +++ b/tests/gen/cli/util_test.go @@ -0,0 +1,107 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package cli + +import ( + "context" + "fmt" + "net/http" + "os" + "testing" + + badger "github.com/sourcenetwork/badger/v4" + "github.com/stretchr/testify/require" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/config" + badgerds "github.com/sourcenetwork/defradb/datastore/badger/v4" + "github.com/sourcenetwork/defradb/db" + "github.com/sourcenetwork/defradb/errors" + httpapi "github.com/sourcenetwork/defradb/http" + "github.com/sourcenetwork/defradb/logging" +) + +var log = logging.MustNewLogger("cli") + +type defraInstance struct { + db client.DB + server *httpapi.Server +} + +func (di *defraInstance) close(ctx context.Context) { + di.db.Close() + if err := di.server.Close(); err != nil { + log.FeedbackInfo( + ctx, + "The server could not be closed successfully", + logging.NewKV("Error", err.Error()), + ) + } +} + +func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { + log.FeedbackInfo(ctx, "Starting DefraDB service...") + + log.FeedbackInfo(ctx, "Building new memory store") + opts := badgerds.Options{Options: badger.DefaultOptions("").WithInMemory(true)} + rootstore, err := badgerds.NewDatastore("", &opts) + + if err != nil { + return nil, errors.Wrap("failed to open datastore", err) + } + + db, err := db.NewDB(ctx, rootstore) + if err != nil { + return nil, errors.Wrap("failed to create database", err) + } + + server, err := httpapi.NewServer(db) + if err != nil { + return nil, errors.Wrap("failed to create http server", err) + } + if err := server.Listen(ctx); err != nil { + return nil, errors.Wrap(fmt.Sprintf("failed to listen on TCP address %v", server.Addr), err) + } + // save the address on the config in case the port number was set to random + cfg.API.Address = server.AssignedAddr() + + // run the server in a separate goroutine + go func() { + log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", cfg.API.AddressToURL())) + if err := server.Run(ctx); err != nil && !errors.Is(err, http.ErrServerClosed) { + log.FeedbackErrorE(ctx, "Failed to run the HTTP server", err) + db.Close() + os.Exit(1) + } + }() + + return &defraInstance{ + db: db, + server: server, + }, nil +} + +func getTestConfig(t *testing.T) *config.Config { + cfg := config.DefaultConfig() + cfg.Datastore.Store = "memory" + cfg.Datastore.Badger.Path = t.TempDir() + cfg.Net.P2PDisabled = true + return cfg +} + +func startTestNode(t *testing.T) (*config.Config, *defraInstance, func()) { + cfg := getTestConfig(t) + + ctx := context.Background() + di, err := start(ctx, cfg) + require.NoError(t, err) + return cfg, di, func() { di.close(ctx) } +} diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index 30f0f70efe..55a15737ea 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -66,7 +66,7 @@ func newTypeUsageCounter(random *rand.Rand) typeUsageCounters { func (c *typeUsageCounters) addRelationUsage( secondaryType string, field client.FieldDescription, - min, max, numDocs int, + minPerDoc, maxPerDoc, numDocs int, ) { primaryType := field.Schema if _, ok := c.m[primaryType]; !ok { @@ -76,7 +76,7 @@ func (c *typeUsageCounters) addRelationUsage( c.m[primaryType][secondaryType] = make(map[string]*relationUsage) } if _, ok := c.m[primaryType][secondaryType][field.Name]; !ok { - c.m[primaryType][secondaryType][field.Name] = newRelationUsage(min, max, numDocs, c.random) + c.m[primaryType][secondaryType][field.Name] = newRelationUsage(minPerDoc, maxPerDoc, numDocs, c.random) } } @@ -89,10 
+89,10 @@ func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *
 type relationUsage struct {
 	// counter is the number of primary documents that have been used for the relation.
 	counter int
-	// minAmount is the minimum number of primary documents that should be used for the relation.
-	minAmount int
-	// maxAmount is the maximum number of primary documents that should be used for the relation.
-	maxAmount int
+	// minSecDocsPerPrimary is the minimum number of secondary documents to generate per primary document.
+	minSecDocsPerPrimary int
+	// maxSecDocsPerPrimary is the maximum number of secondary documents to generate per primary document.
+	maxSecDocsPerPrimary int
 	// docKeysCounter is a slice of structs that keep track of the number of times
 	// each primary document has been used for the relation.
 	docKeysCounter []struct {
@@ -101,18 +101,18 @@ type relationUsage struct {
 		// count is the number of times the primary document has been used for the relation.
 		count int
 	}
-	// numAvailableDocs is the number of documents of the primary type that are available
+	// numAvailablePrimaryDocs is the number of documents of the primary type that are available
 	// for the relation.
-	numAvailableDocs int
-	random           *rand.Rand
+	numAvailablePrimaryDocs int
+	random                  *rand.Rand
 }
 
-func newRelationUsage(minAmount, maxAmount, numDocs int, random *rand.Rand) *relationUsage {
+func newRelationUsage(minSecDocPerPrim, maxSecDocPerPrim, numDocs int, random *rand.Rand) *relationUsage {
 	return &relationUsage{
-		minAmount:        minAmount,
-		maxAmount:        maxAmount,
-		numAvailableDocs: numDocs,
-		random:           random,
+		minSecDocsPerPrimary:    minSecDocPerPrim,
+		maxSecDocsPerPrimary:    maxSecDocPerPrim,
+		numAvailablePrimaryDocs: numDocs,
+		random:                  random,
 	}
 }
 
@@ -123,7 +123,7 @@ func (u *relationUsage) useNextDocKey() int {
 	// if a primary document has a minimum number of secondary documents that should be
 	// generated for it, then it should be used until that minimum is reached.
 	// After that, we can pick a random primary document to use.
-	if u.counter >= u.minAmount*u.numAvailableDocs {
+	if u.counter >= u.minSecDocsPerPrimary*u.numAvailablePrimaryDocs {
 		docKeyCounterInd = u.random.Intn(len(u.docKeysCounter))
 	} else {
 		docKeyCounterInd = u.counter % len(u.docKeysCounter)
@@ -133,7 +133,7 @@
 	docCounter.count++
 	// if the primary document reached max number of secondary documents, we can remove it
 	// from the slice of primary documents that are available for the relation.
- if docCounter.count >= u.maxAmount { + if docCounter.count >= u.maxSecDocsPerPrimary { lastCounterInd := len(u.docKeysCounter) - 1 *docCounter = u.docKeysCounter[lastCounterInd] u.docKeysCounter = u.docKeysCounter[:lastCounterInd] @@ -148,7 +148,7 @@ func (u *relationUsage) allocateIndexes() { docKeysCounter := make([]struct { ind int count int - }, u.numAvailableDocs) + }, u.numAvailablePrimaryDocs) for i := range docKeysCounter { docKeysCounter[i].ind = i } @@ -207,18 +207,20 @@ func (g *docsGenConfigurator) Configure(options ...Option) error { } func (g *docsGenConfigurator) calculateDocsDemand(initialTypes map[string]typeDemand) error { - for typeName, demand := range initialTypes { - var err error - // from the current type we go up the graph and calculate the demand for primary types - demand, err = g.getPrimaryDemand(typeName, demand, g.primaryGraph) - if err != nil { - return err - } - g.docsDemand[typeName] = demand + for _, typeName := range g.typesOrder { + if demand, ok := initialTypes[typeName]; ok { + var err error + // from the current type we go up the graph and calculate the demand for primary types + demand, err = g.getPrimaryDemand(typeName, demand, g.primaryGraph) + if err != nil { + return err + } + g.docsDemand[typeName] = demand - err = g.calculateDemandForSecondaryTypes(typeName, g.primaryGraph) - if err != nil { - return err + err = g.calculateDemandForSecondaryTypes(typeName, g.primaryGraph) + if err != nil { + return err + } } } @@ -252,11 +254,11 @@ func (g *docsGenConfigurator) allocateUsageCounterIndexes() { } for _, usage := range g.usageCounter.m[typeName] { for _, field := range usage { - if field.numAvailableDocs == math.MaxInt { - field.numAvailableDocs = max + if field.numAvailablePrimaryDocs == math.MaxInt { + field.numAvailablePrimaryDocs = max } - if field.numAvailableDocs > demand.max { - field.numAvailableDocs = demand.max + if field.numAvailablePrimaryDocs > demand.max { + field.numAvailablePrimaryDocs = demand.max } field.allocateIndexes() } @@ -274,6 +276,7 @@ func (g *docsGenConfigurator) getDemandForPrimaryType( if field.IsObject() && field.Schema == secondaryType { primaryDemand := typeDemand{min: secondaryDemand.min, max: secondaryDemand.max} minPerDoc, maxPerDoc := 1, 1 + if field.IsArray() { fieldConf := g.config.ForField(primaryType, field.Name) minPerDoc, maxPerDoc = getMinMaxOrDefault(fieldConf, 0, secondaryDemand.max) @@ -339,22 +342,38 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( if field.IsObject() && !field.IsPrimaryRelation() { primaryDocDemand := g.docsDemand[typeName] newSecDemand := typeDemand{min: primaryDocDemand.min, max: primaryDocDemand.max} - min, max := 1, 1 + minPerDoc, maxPerDoc := 1, 1 + + curSecDemand, hasSecDemand := g.docsDemand[field.Schema] if field.IsArray() { fieldConf := g.config.ForField(typeName, field.Name) - min, max = getMinMaxOrDefault(fieldConf, DefaultNumChildrenPerDoc, DefaultNumChildrenPerDoc) - newSecDemand.max = primaryDocDemand.min * max - newSecDemand.min = primaryDocDemand.max * min + if prop, ok := fieldConf.props["min"]; ok { + minPerDoc = prop.(int) + maxPerDoc = fieldConf.props["max"].(int) + newSecDemand.min = primaryDocDemand.max * minPerDoc + newSecDemand.max = primaryDocDemand.min * maxPerDoc + } else if hasSecDemand { + minPerDoc = curSecDemand.min / primaryDocDemand.max + maxPerDoc = curSecDemand.max / primaryDocDemand.min + newSecDemand.min = curSecDemand.min + newSecDemand.max = curSecDemand.max + } else { + minPerDoc = DefaultNumChildrenPerDoc + maxPerDoc = 
DefaultNumChildrenPerDoc + newSecDemand.min = primaryDocDemand.max * minPerDoc + newSecDemand.max = primaryDocDemand.min * maxPerDoc + } } - curSecDemand := g.docsDemand[field.Schema] - if curSecDemand.usedDefined && - (curSecDemand.min < newSecDemand.min || curSecDemand.max > newSecDemand.max) { - return NewErrCanNotSupplyTypeDemand(field.Schema) + if hasSecDemand { + if curSecDemand.min < newSecDemand.min || curSecDemand.max > newSecDemand.max { + return NewErrCanNotSupplyTypeDemand(field.Schema) + } + } else { + g.docsDemand[field.Schema] = newSecDemand } - g.docsDemand[field.Schema] = newSecDemand - g.initRelationUsages(field.Schema, typeName, min, max) + g.initRelationUsages(field.Schema, typeName, minPerDoc, maxPerDoc) err := g.calculateDemandForSecondaryTypes(field.Schema, primaryGraph) if err != nil { @@ -375,11 +394,12 @@ func (g *docsGenConfigurator) calculateDemandForSecondaryTypes( return nil } -func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, min, max int) { +func (g *docsGenConfigurator) initRelationUsages(secondaryType, primaryType string, minPerDoc, maxPerDoc int) { secondaryTypeDef := g.types[secondaryType] for _, secondaryTypeField := range secondaryTypeDef.Schema.Fields { if secondaryTypeField.Schema == primaryType { - g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, min, max, g.docsDemand[primaryType].getAverage()) + g.usageCounter.addRelationUsage(secondaryType, secondaryTypeField, minPerDoc, + maxPerDoc, g.docsDemand[primaryType].getAverage()) } } } diff --git a/tests/gen/gen_auto_option.go b/tests/gen/gen_auto_option.go index 3ed80c531e..304ff24a6c 100644 --- a/tests/gen/gen_auto_option.go +++ b/tests/gen/gen_auto_option.go @@ -25,7 +25,7 @@ func WithTypeDemand(typeName string, demand int) Option { // WithTypeDemandRange configures the demand range for a type. 
func WithTypeDemandRange(typeName string, min, max int) Option { return func(g *docsGenConfigurator) { - g.docsDemand[typeName] = typeDemand{min: min, max: min, usedDefined: true} + g.docsDemand[typeName] = typeDemand{min: min, max: max, usedDefined: true} } } diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index a29f5b9f28..5e4c62a0a2 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -61,7 +61,7 @@ func getBooleanField(t *testing.T, doc *client.Document, fieldName string) bool } func getDocKeysFromDocs(docs []*client.Document) []string { - var result []string + result := make([]string, 0, len(docs)) for _, doc := range docs { result = append(result, doc.Key().String()) } @@ -613,6 +613,95 @@ func TestAutoGenerateFromSchema_IfNoDemandForPrimaryType_ShouldDeduceFromMaxSeco assert.Len(t, filterByCollection(docs, "Order"), 10) } +func TestAutoGenerateFromSchema_IfDemand2TypesWithOptions_ShouldAdjust(t *testing.T) { + const ( + numUsers = 100 + numDevices = 300 + ) + schema := ` + type User { + name: String + devices: [Device] + } + + type Device { + owner: User + model: String + }` + + docs, err := AutoGenerateFromSDL(schema, + WithTypeDemand("User", numUsers), + WithTypeDemand("Device", numDevices), + ) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numDevices) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) +} + +func TestAutoGenerateFromSchema_IfDemand2TypesWithOptionsAndFieldDemand_ShouldAdjust(t *testing.T) { + const ( + numUsers = 100 + numDevices = 300 + ) + schema := ` + type User { + name: String + devices: [Device] # min: 1, max: 5 + } + + type Device { + owner: User + model: String + }` + + docs, err := AutoGenerateFromSDL(schema, + WithTypeDemand("User", numUsers), + WithTypeDemand("Device", numDevices), + ) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), numDevices) + + assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", 1, 5) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) +} + +func TestAutoGenerateFromSchema_IfDemand2TypesWithRangeOptions_ShouldAdjust(t *testing.T) { + const ( + numUsers = 100 + minNumDevices = 100 + maxNumDevices = 500 + ) + schema := ` + type User { + name: String + devices: [Device] + } + + type Device { + owner: User + model: String + }` + + docs, err := AutoGenerateFromSDL(schema, + WithTypeDemand("User", numUsers), + WithTypeDemandRange("Device", minNumDevices, maxNumDevices), + ) + assert.NoError(t, err) + + assert.Len(t, filterByCollection(docs, "User"), numUsers) + assert.Len(t, filterByCollection(docs, "Device"), (maxNumDevices+minNumDevices)/2) + + assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", 1, 5) + + assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) +} + func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { testCases := []struct { name string @@ -976,7 +1065,6 @@ func TestAutoGenerateFromSchema_CustomFieldValueGenerator(t *testing.T) { for _, doc := range docs { actualAgeVal := getIntField(t, doc.Doc, "age") - //actualAgeVal := getIntField(t, jsonToMap(doc.JSON), "age") assert.Equal(t, ageVal, actualAgeVal) } } From 280fcf5c34f1ad99a7176456b0ebedcf40e2301d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sat, 16 Dec 2023 00:30:11 -0500 
Subject: [PATCH 21/60] bot: Update dependencies (bulk dependabot PRs) 04-12-2023 (#2133) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2126 bot: Bump github.com/bits-and-blooms/bitset from 1.11.0 to 1.12.0 #2125 bot: Bump github.com/ipfs/boxo from 0.15.0 to 0.16.0 #2124 bot: Bump @typescript-eslint/parser from 6.13.0 to 6.13.2 in /playground #2123 bot: Bump @types/react from 18.2.42 to 18.2.43 in /playground #2122 bot: Bump typescript from 5.3.2 to 5.3.3 in /playground #2120 bot: Bump graphiql from 3.0.9 to 3.0.10 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #2121 bot: Bump vite from 5.0.5 to 5.0.7 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 4 +- go.sum | 9 ++- playground/package-lock.json | 122 +++++++---------------------------- playground/package.json | 10 +-- 4 files changed, 35 insertions(+), 110 deletions(-) diff --git a/go.mod b/go.mod index 97a7332080..cba12e239d 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/sourcenetwork/defradb go 1.20 require ( - github.com/bits-and-blooms/bitset v1.11.0 + github.com/bits-and-blooms/bitset v1.12.0 github.com/bxcodec/faker v2.0.1+incompatible github.com/evanphx/json-patch/v5 v5.7.0 github.com/fxamacker/cbor/v2 v2.5.0 @@ -13,7 +13,7 @@ require ( github.com/go-errors/errors v1.5.1 github.com/gofrs/uuid/v5 v5.0.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.15.0 + github.com/ipfs/boxo v0.16.0 github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 diff --git a/go.sum b/go.sum index f5d83b2f7e..be78f035ad 100644 --- a/go.sum +++ b/go.sum @@ -58,8 +58,8 @@ github.com/benbjohnson/clock v1.3.5/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZx github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bits-and-blooms/bitset v1.11.0 h1:RMyy2mBBShArUAhfVRZJ2xyBO58KCBCtZFShw3umo6k= -github.com/bits-and-blooms/bitset v1.11.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA= +github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= @@ -290,8 +290,8 @@ github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.15.0 h1:BriLydj2nlK1nKeJQHxcKSuG5ZXcoutzhBklOtxC5pk= -github.com/ipfs/boxo v0.15.0/go.mod 
h1:X5ulcbR5Nh7sm3Db8+08AApUo6FsGC5mb23QDKAoB/M= +github.com/ipfs/boxo v0.16.0 h1:A9dUmef5a+mEFki6kbyG7el5gl65CiUBzrDeZxzTWKY= +github.com/ipfs/boxo v0.16.0/go.mod h1:jAgpNQn7T7BnibUeReXcKU9Ha1xmYNyOlwVEl193ow0= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= github.com/ipfs/go-block-format v0.2.0/go.mod h1:+jpL11nFx5A/SPpsoBn6Bzkra/zaArfSmsknbPMYgzM= @@ -621,7 +621,6 @@ github.com/vito/go-sse v1.0.0/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpg github.com/warpfork/go-testmark v0.12.1 h1:rMgCpJfwy1sJ50x0M0NgyphxYYPMOODIJHhsXyEHU0s= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0 h1:GDDkbFiaK8jsSDJfjId/PEGEShv6ugrt4kYsC5UIDaQ= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= -github.com/whyrusleeping/base32 v0.0.0-20170828182744-c30ac30633cc h1:BCPnHtcboadS0DvysUuJXZ4lWVv5Bh5i7+tbIyi+ck4= github.com/whyrusleeping/chunker v0.0.0-20181014151217-fe64bd25879f h1:jQa4QT2UP9WYv2nzyawpKMOCl+Z/jW7djv2/J50lj9E= github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdznlJHPMoKr0XTrX+IlJs1LH3lyx2nfr1dOlZ79k= github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc= diff --git a/playground/package-lock.json b/playground/package-lock.json index 3dc2a67400..d1a1599e69 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -8,24 +8,24 @@ "name": "playground", "version": "0.0.0", "dependencies": { - "graphiql": "^3.0.9", + "graphiql": "^3.0.10", "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.42", + "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.2", - "@typescript-eslint/parser": "^6.13.0", + "@typescript-eslint/parser": "^6.13.2", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.55.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", - "typescript": "^5.3.2", - "vite": "^5.0.5" + "typescript": "^5.3.3", + "vite": "^5.0.7" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.42", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.42.tgz", - "integrity": "sha512-c1zEr96MjakLYus/wPnuWDo1/zErfdU9rNsIGmE+NV71nx88FG9Ttgo5dqorXTu/LImX2f63WBP986gJkMPNbA==", + "version": "18.2.43", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.43.tgz", + "integrity": "sha512-nvOV01ZdBdd/KW6FahSbcNplt2jCJfyWdTos61RYHV+FVv5L/g9AOX1bmbVcWcLFL8+KHQfh1zVIQrud6ihyQA==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -2329,15 +2329,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.13.0.tgz", - "integrity": "sha512-VpG+M7GNhHLI/aTDctqAV0XbzB16vf+qDX9DXuMZSe/0bahzDA9AKZB15NDbd+D9M4cDsJvfkbGOA7qiZ/bWJw==", + "version": "6.13.2", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.13.2.tgz", + "integrity": "sha512-MUkcC+7Wt/QOGeVlM8aGGJZy1XV5YKjTpq9jK6r6/iLsGXhBVaGP5N0UYvFsu9BFlSpwY9kMretzdBH01rkRXg==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.13.0", - "@typescript-eslint/types": "6.13.0", - 
"@typescript-eslint/typescript-estree": "6.13.0", - "@typescript-eslint/visitor-keys": "6.13.0", + "@typescript-eslint/scope-manager": "6.13.2", + "@typescript-eslint/types": "6.13.2", + "@typescript-eslint/typescript-estree": "6.13.2", + "@typescript-eslint/visitor-keys": "6.13.2", "debug": "^4.3.4" }, "engines": { @@ -2356,80 +2356,6 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.0.tgz", - "integrity": "sha512-2x0K2/CujsokIv+LN2T0l5FVDMtsCjkUyYtlcY4xxnxLAW+x41LXr16duoicHpGtLhmtN7kqvuFJ3zbz00Ikhw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.13.0", - "@typescript-eslint/visitor-keys": "6.13.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.13.0.tgz", - "integrity": "sha512-oXg7DFxx/GmTrKXKKLSoR2rwiutOC7jCQ5nDH5p5VS6cmHE1TcPTaYQ0VPSSUvj7BnNqCgQ/NXcTBxn59pfPTQ==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.0.tgz", - "integrity": "sha512-IT4O/YKJDoiy/mPEDsfOfp+473A9GVqXlBKckfrAOuVbTqM8xbc0LuqyFCcgeFWpqu3WjQexolgqN2CuWBYbog==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.13.0", - "@typescript-eslint/visitor-keys": "6.13.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.0.tgz", - "integrity": "sha512-UQklteCEMCRoq/1UhKFZsHv5E4dN1wQSzJoxTfABasWk1HgJRdg1xNUve/Kv/Sdymt4x+iEzpESOqRFlQr/9Aw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.13.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/scope-manager": { "version": "6.13.2", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.2.tgz", @@ -3745,9 +3671,9 @@ "dev": true }, "node_modules/graphiql": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.0.9.tgz", - "integrity": "sha512-xl9yEr6U4Wc3wmqvtP2sV2a3zGQkqrAMtU90x45QnpNT9MBgBn38HD1Yg5jExXxER65xmYWlGoYdAiD8v/dbEw==", + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.0.10.tgz", + "integrity": "sha512-xgRFCg0mgIyca8keWkmBFA3knh9exDg53SxqFh96ewoMWYLeziqc0xIGFe2L/As8Aw1u5pFZcW913HwX3IXztw==", "dependencies": { 
"@graphiql/react": "^0.20.2", "@graphiql/toolkit": "^0.9.1", @@ -5869,9 +5795,9 @@ } }, "node_modules/typescript": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.2.tgz", - "integrity": "sha512-6l+RyNy7oAHDfxC4FzSJcz9vnjTKxrLpDG5M2Vu4SHRVNg6xzqZp6LYSR9zjqQTu8DU/f5xwxUdADOkbrIX2gQ==", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", + "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -5984,9 +5910,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.5.tgz", - "integrity": "sha512-OekeWqR9Ls56f3zd4CaxzbbS11gqYkEiBtnWFFgYR2WV8oPJRRKq0mpskYy/XaoCL3L7VINDhqqOMNDiYdGvGg==", + "version": "5.0.7", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.7.tgz", + "integrity": "sha512-B4T4rJCDPihrQo2B+h1MbeGL/k/GMAHzhQ8S0LjQ142s6/+l3hHTT095ORvsshj4QCkoWu3Xtmob5mazvakaOw==", "dev": true, "dependencies": { "esbuild": "^0.19.3", diff --git a/playground/package.json b/playground/package.json index fa7d5a2f90..655937f159 100644 --- a/playground/package.json +++ b/playground/package.json @@ -10,23 +10,23 @@ "preview": "vite preview" }, "dependencies": { - "graphiql": "^3.0.9", + "graphiql": "^3.0.10", "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.42", + "@types/react": "^18.2.43", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.2", - "@typescript-eslint/parser": "^6.13.0", + "@typescript-eslint/parser": "^6.13.2", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.55.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", - "typescript": "^5.3.2", - "vite": "^5.0.5" + "typescript": "^5.3.3", + "vite": "^5.0.7" } } From 8ae63a2c1f7edd9903e59abb29739aebc228a1f6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Dec 2023 17:32:03 -0500 Subject: [PATCH 22/60] bot: Bump vite from 5.0.7 to 5.0.10 in /playground (#2135) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 5.0.7 to 5.0.10.
Changelog

Sourced from vite's changelog.

5.0.10 (2023-12-15)

  • fix: omit protocol does not require pre-transform (#15355) (d9ae1b2), closes #15355
  • fix(build): use base64 for inline SVG if it contains both single and double quotes (#15271) (1bbff16), closes #15271; see the sketch after this changelog

5.0.9 (2023-12-14)

5.0.8 (2023-12-12)
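
The base64 fix above is easy to motivate: a URL-encoded `data:` URI must be wrapped in either single or double quotes wherever it is inlined, so an SVG that contains both quote characters cannot be embedded safely without further escaping, while base64 output contains neither. The following is a minimal sketch of that selection logic only; `inlineSvg` is a hypothetical helper, not Vite's actual implementation:

```ts
// Hypothetical sketch of the idea behind the 5.0.10 fix, not Vite's code.
// URL-encoding keeps small inlined SVGs readable, but an SVG containing
// both ' and " cannot be quoted in CSS/HTML without escaping, so fall
// back to base64, whose alphabet contains neither quote character.
function inlineSvg(svg: string): string {
  if (svg.includes("'") && svg.includes('"')) {
    return `data:image/svg+xml;base64,${Buffer.from(svg).toString("base64")}`;
  }
  // Encode only the characters that would break the data URI itself.
  const encoded = svg.replace(/[#?%<>]/g, (c) => encodeURIComponent(c));
  return `data:image/svg+xml,${encoded}`;
}

// This SVG mixes both quote styles, so it takes the base64 branch.
const uri = inlineSvg(`<svg xmlns="http://www.w3.org/2000/svg" aria-label='q'/>`);
```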

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=vite&package-manager=npm_and_yarn&previous-version=5.0.7&new-version=5.0.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:

  • `@dependabot rebase` will rebase this PR
  • `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
  • `@dependabot merge` will merge this PR after your CI passes on it
  • `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
  • `@dependabot cancel merge` will cancel a previously requested merge and block automerging
  • `@dependabot reopen` will reopen this PR if it is closed
  • `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
  • `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
  • `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
  • `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index d1a1599e69..6e4707bc0c 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -25,7 +25,7 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", - "vite": "^5.0.7" + "vite": "^5.0.10" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -5910,9 +5910,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.0.7", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.7.tgz", - "integrity": "sha512-B4T4rJCDPihrQo2B+h1MbeGL/k/GMAHzhQ8S0LjQ142s6/+l3hHTT095ORvsshj4QCkoWu3Xtmob5mazvakaOw==", + "version": "5.0.10", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.10.tgz", + "integrity": "sha512-2P8J7WWgmc355HUMlFrwofacvr98DAjoE52BfdbwQtyLH06XKwaL/FMnmKM2crF0iX4MpmMKoDlNCB1ok7zHCw==", "dev": true, "dependencies": { "esbuild": "^0.19.3", diff --git a/playground/package.json b/playground/package.json index 655937f159..a9fe671a5c 100644 --- a/playground/package.json +++ b/playground/package.json @@ -27,6 +27,6 @@ "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", - "vite": "^5.0.7" + "vite": "^5.0.10" } } From 8235a5d502ca8d10d579279ad7d22e0e10761c7d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 16 Dec 2023 18:56:18 -0500 Subject: [PATCH 23/60] bot: Bump @types/react from 18.2.43 to 18.2.45 in /playground (#2134) Bumps [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) from 18.2.43 to 18.2.45.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@types/react&package-manager=npm_and_yarn&previous-version=18.2.43&new-version=18.2.45)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 6e4707bc0c..a4b84686cc 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,7 +15,7 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.43", + "@types/react": "^18.2.45", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.2", @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.43", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.43.tgz", - "integrity": "sha512-nvOV01ZdBdd/KW6FahSbcNplt2jCJfyWdTos61RYHV+FVv5L/g9AOX1bmbVcWcLFL8+KHQfh1zVIQrud6ihyQA==", + "version": "18.2.45", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.45.tgz", + "integrity": "sha512-TtAxCNrlrBp8GoeEp1npd5g+d/OejJHFxS3OWmrPBMFaVQMSN0OFySozJio5BHxTuTeug00AVXVAjfDSfk+lUg==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", diff --git a/playground/package.json b/playground/package.json index a9fe671a5c..0d2e74bec0 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,7 +17,7 @@ "swagger-ui-react": "^5.10.3" }, "devDependencies": { - "@types/react": "^18.2.43", + "@types/react": "^18.2.45", "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.2", From cb9f650ebf24d8cede4403eda890241e1328171a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 17 Dec 2023 23:10:59 -0500 Subject: [PATCH 24/60] bot: Bump @typescript-eslint/parser from 6.13.2 to 6.14.0 in /playground (#2136) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 6.13.2 to 6.14.0.
Release notes

Sourced from @typescript-eslint/parser's releases.

v6.14.0

6.14.0 (2023-12-11)

Bug Fixes

  • eslint-plugin: add no-unsafe-unary-minus, prefer-destructuring to disable-type-checked (#8038) (431cd15)
  • eslint-plugin: correct message for no-unsafe-unary-minus (#7998) (705370a)

Features

  • eslint-plugin: [explicit-function-return-type] add support for typed class property definitions (#8027) (bff47d7)
  • eslint-plugin: [require-await] allow yielding Promise in async generators (#8003) (4c3e704); both features are sketched after these release notes

You can read about our versioning strategy and releases on our website.
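
The two 6.14.0 features above are easiest to see in code. The following is an illustrative sketch only, assuming `@typescript-eslint/eslint-plugin` 6.14.0 with type-aware linting configured and the two rules enabled; the class and function names are made up:

```ts
/* eslint @typescript-eslint/explicit-function-return-type: "error" */
/* eslint @typescript-eslint/require-await: "error" */

class PriceCache {
  private store = new Map<string, number>();

  // Typed class property definition: the property's own annotation supplies
  // the function type, which explicit-function-return-type accepts as of
  // 6.14.0, so the arrow function needs no separate return-type annotation.
  lookup: (key: string) => number | undefined = (key) => this.store.get(key);
}

// require-await no longer reports this async generator even though its body
// contains no `await`: as of 6.14.0, yielding a Promise counts, because the
// async-generator protocol awaits the yielded value before delivering it.
async function* prices(): AsyncGenerator<number> {
  yield Promise.resolve(42);
}
```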

Changelog

Sourced from @typescript-eslint/parser's changelog.

6.14.0 (2023-12-11)

Note: Version bump only for package @typescript-eslint/parser

You can read about our versioning strategy and releases on our website.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=6.13.2&new-version=6.14.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 90 ++++++++++++++++++++++++++++++++---- playground/package.json | 2 +- 2 files changed, 83 insertions(+), 9 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index a4b84686cc..98e5ac22bb 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.2", - "@typescript-eslint/parser": "^6.13.2", + "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.55.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2329,15 +2329,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.13.2.tgz", - "integrity": "sha512-MUkcC+7Wt/QOGeVlM8aGGJZy1XV5YKjTpq9jK6r6/iLsGXhBVaGP5N0UYvFsu9BFlSpwY9kMretzdBH01rkRXg==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.14.0.tgz", + "integrity": "sha512-QjToC14CKacd4Pa7JK4GeB/vHmWFJckec49FR4hmIRf97+KXole0T97xxu9IFiPxVQ1DBWrQ5wreLwAGwWAVQA==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.13.2", - "@typescript-eslint/types": "6.13.2", - "@typescript-eslint/typescript-estree": "6.13.2", - "@typescript-eslint/visitor-keys": "6.13.2", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", "debug": "^4.3.4" }, "engines": { @@ -2356,6 +2356,80 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz", + "integrity": "sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", + "integrity": "sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz", + "integrity": "sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", + "integrity": "sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.14.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "6.13.2", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.2.tgz", diff --git a/playground/package.json b/playground/package.json index 0d2e74bec0..27d34ae436 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.2.17", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.13.2", - "@typescript-eslint/parser": "^6.13.2", + "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.55.0", "eslint-plugin-react-hooks": "^4.6.0", From acc8e3e8c79b2d2e095b7e758a2c0a0932122b6b Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 07:09:50 -0800 Subject: [PATCH 25/60] bot: Update dependencies (bulk dependabot PRs) 18-12-2023 (#2142) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2141 bot: Bump google.golang.org/grpc from 1.59.0 to 1.60.0 #2140 bot: Bump eslint from 8.55.0 to 8.56.0 in /playground #2139 bot: Bump swagger-ui-react from 5.10.3 to 5.10.5 in /playground #2138 bot: Bump @types/react-dom from 18.2.17 to 18.2.18 in /playground #2137 bot: Bump @typescript-eslint/eslint-plugin from 6.13.2 to 6.14.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 +- go.sum | 8 +- playground/package-lock.json | 594 +++++++++++++++-------------------- playground/package.json | 8 +- 4 files changed, 270 insertions(+), 344 deletions(-) diff --git a/go.mod b/go.mod index cba12e239d..bc0d5bbb38 100644 --- a/go.mod +++ b/go.mod @@ -49,7 +49,7 @@ require ( golang.org/x/crypto v0.16.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa golang.org/x/net v0.19.0 - google.golang.org/grpc v1.59.0 + google.golang.org/grpc v1.60.0 google.golang.org/protobuf v1.31.0 ) @@ -186,7 +186,7 @@ require ( golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.15.0 // indirect gonum.org/v1/gonum v0.13.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect lukechampine.com/blake3 v1.2.1 // indirect diff --git a/go.sum b/go.sum index be78f035ad..59a628e26b 100644 --- a/go.sum +++ b/go.sum @@ -999,8 +999,8 @@ google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6D google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13 h1:N3bU/SQDCDyD6R528GJ/PwW9KjYcJA3dgyH+MovAkIM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230920204549-e6e6cdab5c13/go.mod h1:KSqppvjFjtoCI+KGd4PELB0qLNxdJHRGqRI09mB6pQA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97 h1:6GQBEOdGkX6MMTLT9V+TjtIRZCw9VPD5Z+yHY9wMgS0= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97/go.mod h1:v7nGkzlmW8P3n/bKmWBn2WpBjpOEx8Q6gMueudAmKfY= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= @@ -1020,8 +1020,8 @@ google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= -google.golang.org/grpc v1.59.0 h1:Z5Iec2pjwb+LEOqzpB2MR12/eKFhDPhuqW91O+4bwUk= -google.golang.org/grpc v1.59.0/go.mod h1:aUPDwccQo6OTjy7Hct4AfBPD1GptF4fyUjIkQ9YtF98= +google.golang.org/grpc v1.60.0 h1:6FQAR0kM31P6MRdeluor2w2gPaS4SVNrD/DNTxrQ15k= +google.golang.org/grpc v1.60.0/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/playground/package-lock.json b/playground/package-lock.json index 98e5ac22bb..92af0a2b90 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,16 +12,16 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.10.3" + "swagger-ui-react": "^5.10.5" }, "devDependencies": { "@types/react": "^18.2.45", - "@types/react-dom": "^18.2.17", + "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.13.2", + "@typescript-eslint/eslint-plugin": "^6.14.0", "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react-swc": "^3.5.0", - "eslint": "^8.55.0", + "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", @@ -49,9 +49,9 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.23.2", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.23.2.tgz", - "integrity": "sha512-54cIh74Z1rp4oIjsHjqN+WM4fMyCBYe+LpZ9jWm51CZ1fbH3SkAzQD/3XLoNkjbJ7YEmjobLXyvQrFypRHOrXw==", + "version": "7.23.6", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.23.6.tgz", + "integrity": "sha512-Djs/ZTAnpyj0nyg7p1J6oiE/tZ9G2stqAFlLGZynrW+F3k2w2jGK2mLOBxzYIOcZYA89+c3d3wXKpYLcpwcU6w==", "dependencies": { "core-js-pure": "^3.30.2", "regenerator-runtime": "^0.14.0" @@ -511,9 +511,9 @@ } }, 
"node_modules/@eslint/js": { - "version": "8.55.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.55.0.tgz", - "integrity": "sha512-qQfo2mxH5yVom1kacMtZZJFVdW+E70mqHMJvVg6WTLo+VBuQJ4TojZlfWBjK0ve5BdEeNAVxOsl/nvNMpJOaJA==", + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", + "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1558,251 +1558,251 @@ ] }, "node_modules/@swagger-api/apidom-ast": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.83.0.tgz", - "integrity": "sha512-zAn9kHFi2JmEldYxzw6x7rbKxL4NVWvOeCWQL0AlwcWHPRhW+16/1VeHNhoWeiWm6QMERNT8z0o5frg+2czb6g==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.88.0.tgz", + "integrity": "sha512-Gsp2VRWRrekIvxWRV8dEdigRpxGc0PSM+tB7MC1BJJEMZvKzj+xWcU2QcDJLO2/DgBIRvsvtEX8ZfLWqUubT2A==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-error": "^0.83.0", + "@swagger-api/apidom-error": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2", "unraw": "^3.0.0" } }, "node_modules/@swagger-api/apidom-core": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.83.0.tgz", - "integrity": "sha512-4pWzSbxfYrS5rH7tl4WLO5nyR7pF+aAIymwsyV2Xrec44p6d4UZaJEn1iI3r9PBBdlmOHPKgr3QiOxn71Q3XUA==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.88.0.tgz", + "integrity": "sha512-Zfeww6tphn1eDaAHhECFEULnBspF0u1J2x1a5x7E3LMS7OuyE+/53xOyN71UAakvX1+K+Cw8UBLxR0yGbFEMow==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", + "@swagger-api/apidom-ast": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", "@types/ramda": "~0.29.6", "minim": "~0.23.8", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "short-unique-id": "^5.0.2", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-error": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.83.0.tgz", - "integrity": "sha512-0T3B+5Q2cApW0EkcMAqpgvsj+ab46HPvkVsYClA9/L0suRvyPiI5XDkHsw26qPGsmuB5nCH4hveZHlbWwRINMg==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.88.0.tgz", + "integrity": "sha512-RBhk2rlZn/oi916cgwKj+b/ynHHHabAcVzi0T7VY38JbU+6ab8F+JDbUSvFK42vmCF3/FSnpf7CnIm6TEBuaQA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7" } }, "node_modules/@swagger-api/apidom-json-pointer": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.83.0.tgz", - "integrity": "sha512-mT60Dfqfym9LisGcFEUV/ZwCWrcd/sI24ACAUr7D/gCMX2GuJHC7qrRwWVjGDaaDMVhDM5eCi6GKPjQhs0Ckmw==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.88.0.tgz", + "integrity": "sha512-wGdKNhA5WGwegJ6spTfPxg9te5dyAUDQLArTa0wesFtpVV5cXg9jVifSCmuFHJBTzBHLF3xyAbJNe4luq8QW9Q==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": 
"^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-ns-api-design-systems": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.83.0.tgz", - "integrity": "sha512-ahkhB8QIQhos0g2WRAPb7d3HRPP4FgaPTq81Fd3IeCy1pqsRrMhBOHBt3aksOmSvCrHScXHiIU0OBsGA+vt1CA==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.88.0.tgz", + "integrity": "sha512-JELQajWJOYGAnx7T3k33v8HQlIkmHmgfiCNarHTCV6i1mImJDRTPTYvvyPMWzzyx9JdQR1u47ZFb7b7I33k7vg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-asyncapi-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.83.0.tgz", - "integrity": "sha512-A53C93GXcB9D7XSZRzEHv2k+GSa7nl7agN364sFFxS4Q/CtwNQiKVkpMCc5nG7/jUJOgj9BgevBR2p5kgYzH8Q==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.88.0.tgz", + "integrity": "sha512-80025KRDyRMgHFSZt8LT1S0wVK6VkzMKec0w4u1vrnjcC9lcAWmV1Ojuur6g2afEbn4Avv9bcUb6uPb9U3NEYA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-json-schema-draft-7": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-json-schema-draft-7": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-4": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.83.0.tgz", - "integrity": "sha512-boknhIfrXF1k9IxLV0CkO1EoeXed4mzDNbFNKTkIv7UAdFwAa7NiQLVlEehNY3Ufm3/PjVMzYVQ80tUbyQE2Sw==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.88.0.tgz", + "integrity": "sha512-m0h+HMUTKZ7MNJMflJwXC0ArFgLntENmIg4pqtPKTcA7Qwij8rJjKzGgHvXBe1ahkVc6uGBulIWOT86hpIxWSg==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.83.0", - "@swagger-api/apidom-core": "^0.83.0", + "@swagger-api/apidom-ast": "^0.88.0", + "@swagger-api/apidom-core": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-6": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.83.0.tgz", - "integrity": "sha512-QP5MJh8hB5eK1+lZlZvUk7H02Oa+Qaq+BPNpAbmV4oG8YLUg98NxyKt+BFVhtfHWa1/i/Cpr3muiNdVIClduxw==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.88.0.tgz", + "integrity": 
"sha512-ow4rcijuw+DX29Kv4kZS7AaeblmpHv4fxDumKrfv2raQbj4YCew0tK+8LEno4ssIjcHCIOUbbxU1rLAJPtqwyA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-7": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.83.0.tgz", - "integrity": "sha512-+91iNJQ1Oe7Hx7Q306O2JUyp7I1s0FvoZ/8FxiVYtcohGQW21CQ0j8kLv4NrQjHuHRgOquPPUXOEJGcX7s8Zsw==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.88.0.tgz", + "integrity": "sha512-YlNb5Z6vDhVJGsSbXBhJLt5pRiV2tf0fBZH1rkBrzX0Zl0TJYD7X4bAkU/FdZsK1eDhvEYVq8VQHJRrwbqCr4g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", - "@swagger-api/apidom-ns-json-schema-draft-6": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ns-json-schema-draft-6": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-openapi-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.83.0.tgz", - "integrity": "sha512-05/IsGs1dJffvbyaxCXGA5r+tVMJpL+LOwqiKl7hGqUWOC4ku2sA0fLhxiu7fhedxq/Kbqi7ahQMihQhEP0cDQ==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.88.0.tgz", + "integrity": "sha512-QVay4Kh3Z1KV7UScJezdiIBQiBNAhTitAz2XY4U5kpyiifn0Z/KlokeMBk5mpuwWxFo83PPoB+kCTb/Joa3D7g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-openapi-3-0": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.83.0.tgz", - "integrity": "sha512-OAN6buySWrWSvnctKVSxkG5HyUOVc8F87zHy8mxcKn91AaHPC6h8LBxIXcmXFDfZNvORZYTi7GFw3W+mnIMTwg==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.88.0.tgz", + "integrity": "sha512-RrAe32aDT/EZMEVz1kE3dGp5M2eSFMsciQYGBS+SAadaFe8sTgDeKw5J0rPUhcfcrpUnAXHx3EL+37u5JfPJ2w==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", + 
"@swagger-api/apidom-ns-json-schema-draft-4": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-ns-openapi-3-1": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.83.0.tgz", - "integrity": "sha512-xD/T5f9Phqk4/FN5iaH8OM+5AbUqXQV92zdN5twrLCgCCA3l/1PMA7g9qEBTCG3f6UmyJ/6TTFOJyz7utye7Hg==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.88.0.tgz", + "integrity": "sha512-RJl74WxWZjiF1iz/7887Lc0hcjS9EZ+IBTzLaZNhr8VYPJG6vkpUV05YOVYUAyY22CMkP4cYtL9pfVni9pYdkA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.83.0", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.83.0", + "@swagger-api/apidom-ast": "^0.88.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-json": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.83.0.tgz", - "integrity": "sha512-GeMW5pamup8KeaYSbyV2/zMilslIPhQLMf9h9le9JJGJ233ugiBf/y5Vguyj1w1TQXniXztXF43B3A+RNArkmg==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.88.0.tgz", + "integrity": "sha512-sq1RY9hhttG0em6lf0Dj5nGIXQjb0Q3wOGIHsFT4d3FdVoPKMtTbtrSJvaYlETzKrjZCT6HtdmTa7CNtrW/1mA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.83.0", - "@swagger-api/apidom-parser-adapter-json": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-api-design-systems": "^0.88.0", + "@swagger-api/apidom-parser-adapter-json": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-yaml": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.83.0.tgz", - "integrity": "sha512-KYpW/gVfz4SQ4YPmC3x9wnUcOlwah7D4r/S2+FLvEQhf6LoEmKHL1ljcZ1Ma3seWCqMhmS1sKXHWNcYyNtY49A==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.88.0.tgz", + "integrity": "sha512-jeDD+a9Dt+bcgR9AV5NCks02nL4qP6IOeRxtij+jkHkvC70swCS160tUl4D+ID1TWDPJx689weQuVDlIbwBzMg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.83.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-api-design-systems": "^0.88.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, 
"node_modules/@swagger-api/apidom-parser-adapter-asyncapi-json-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.83.0.tgz", - "integrity": "sha512-iQPDH6uIGRvJTQt6olkVUwndT91fVNrlBH3LybwHbFVLs1CKcQGJQ4lLENGw97YBVp83VO78P20Av5CiGEu80Q==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.88.0.tgz", + "integrity": "sha512-rGze1i5ItUCuaNgy397YKKniQCTAko6Bi3SLSsRXeb+hLU4z3Bwzw/ImEVUqo/uUVNUGNc0tgGcTah0pBfzb1g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-json": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-asyncapi-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-json": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.83.0.tgz", - "integrity": "sha512-Q5UuatTIpYTzdCZH6ZcbT9Pw0MCLzaYzrFM6hdBWusbUriuwT12nTyt3Wer7/6nOcg+ysPTX7lUpxfUMPwT6xA==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.88.0.tgz", + "integrity": "sha512-RqR+vq/P0OkyxvLh1Nvaj88TEwEaNvxCBrkC8BkiS+LM4j4XtxHEFbjisuHCi2ANqvvonNN1ccU1Y1mBJv3D1A==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-asyncapi-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-json": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.83.0.tgz", - "integrity": "sha512-V6KDWP4JuLYaTpd9J8n76kiFP09trJ6PmeVERioPoZn0HpaNh7eFcIFkejFGamQADYPrF6aW6b3A2MmJjTqbMg==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.88.0.tgz", + "integrity": "sha512-JiMOxnYtr7VjyenjdMc9LH6WhgCNH065vROAakTZqZG814J/iM5HoPjdZbm7yyjl0+4OXoDNm6XPIuMxrwAeiA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.83.0", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", + "@swagger-api/apidom-ast": "^0.88.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2", "tree-sitter": "=0.20.4", @@ -1811,107 +1811,107 @@ } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.83.0.tgz", - "integrity": 
"sha512-bNrD+hpmQINU+hhzgc5VEFp04UJXRf4tKq4XpPrtVBOvZ4uJwmqLVVVNfZqes8OfLt/7ijgxNju6IwruvLeylQ==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.88.0.tgz", + "integrity": "sha512-RpzHNSpvN1ieAeyyKvK24H3vl0OiSfaPbRYTN6BRthab1dPC5vSjuP7ARwY576vldtUTQX8ltBfuRlB2G0NZXQ==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-json": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-json": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-0": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.83.0.tgz", - "integrity": "sha512-UbtCsg+OBbWE1vYXPeNHeLSj+79YHhDtNNPai5NFTcXgPlNhuEOKBeCqq+VBA7sos3amk0lHYUz/UFCDIcR29w==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.88.0.tgz", + "integrity": "sha512-6h01b3QUJ/QNQ8ngNl+edQma3puL2DXIa5rW0VqVOi2e4x0hKbgYdsxx+lE8IV6SVl6t/I+nWKXDjWgZR0GOdA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.83.0", - "@swagger-api/apidom-parser-adapter-json": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", + "@swagger-api/apidom-parser-adapter-json": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-1": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.83.0.tgz", - "integrity": "sha512-+O2m00jNtESw1y+KCubcte61S1SN9Nxda/KaA6yXLsZgjiYAs0HXcPEyjwGbhjHtm6NfexbOdT0poHOYbsvWfQ==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.88.0.tgz", + "integrity": "sha512-r7GKkldiHbL6YAvE6ZUVSW2Sfy70kRHV86esvOj8rQxy2tf7WRsgscjXGY5Gq4BpPWo7lMg31LpBmt5Ahqrrtw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.83.0", - "@swagger-api/apidom-parser-adapter-json": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", + "@swagger-api/apidom-parser-adapter-json": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.83.0.tgz", - "integrity": "sha512-YtU1wSE57yucov8A179TSB5WMJ4X5pxF5ccxW8yNxwVPH3tYkVgh5mPI8zVXQsjWLCSpyhZbiLWT5reYl5Onqw==", + "version": "0.88.0", + "resolved": 
"https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.88.0.tgz", + "integrity": "sha512-mFAzDtv9y4oqvaLsKVsJYstpH/6UzKAm2gD4ahj/w5/Gf7lG/3bpSvhzefErUbo8wXG1HcmP2v9AWKrjutyCwg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.83.0.tgz", - "integrity": "sha512-3he5fFM3GS6/WtcVldvWQgW2TFO7S2rWqYMHGASdLLm8E9pzfRw2T30ZymkDuMlC4rqH9zscbJnRFMXQV9OylQ==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.88.0.tgz", + "integrity": "sha512-1RJ5V9QklKV40N/Q8UrfqjVddynjvqi318lYusCLzFOM13cRgGZKKAztasabSEaI886wiV3rwR+EcH1eH+okQg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.83.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.83.0.tgz", - "integrity": "sha512-m8SAWw8fD0QH3SR70NiDzFsJnQjzEREY5v8O8brqs5c/Rz/JtJ2WCDrLHK7eVq/Myapl/ZRJx+/xJbPZckzE0g==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.88.0.tgz", + "integrity": "sha512-5JFGJgAAPrAPLuiGzMK9uRUqulO7pORlSslptfWhfTS68ES1X2TCODhFoH0LBCz1xTd5KhJn/7tRNlrPns0+ow==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.83.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-yaml-1-2": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.83.0.tgz", - "integrity": "sha512-3Pgtz88rxaiW2qg1RC8BUhusHAXe/a+FDNscfa9GHzHMEVZSmeZ13tfhzOW6a4TINmWyO7DNcKtdvlVQAPlmXQ==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.88.0.tgz", + "integrity": 
"sha512-MGhcLYecAAp3hIx8pKd8QWzDjP9nLF/iM39BLAoDgt4gPduNJDMVtuHJGvFT75MOjWTuLgc6gfYK7VCZhjrmvg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.83.0", - "@swagger-api/apidom-core": "^0.83.0", - "@swagger-api/apidom-error": "^0.83.0", + "@swagger-api/apidom-ast": "^0.88.0", + "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-error": "^0.88.0", "@types/ramda": "~0.29.6", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2", "tree-sitter": "=0.20.4", @@ -1920,39 +1920,39 @@ } }, "node_modules/@swagger-api/apidom-reference": { - "version": "0.83.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.83.0.tgz", - "integrity": "sha512-f7Pm3fQwjf1pqniV+9abkC+oYUAbL/31GCg58r8ou4Cx+5hGTpUg81caMjdeg5Y4+Txj2ZUaAaUYyigEV25i4w==", + "version": "0.88.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.88.0.tgz", + "integrity": "sha512-rNM8j3JAcCWqNxnxFLm8mqqUT6usLXOU3fTrySZhqch2g1nOkZ0wPjhTV87VrovK/rtEdqLHCBwvfluyrxYXEg==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.83.0", + "@swagger-api/apidom-core": "^0.88.0", "@types/ramda": "~0.29.6", "axios": "^1.4.0", "minimatch": "^7.4.3", "process": "^0.11.10", - "ramda": "~0.29.0", + "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", "stampit": "^4.3.2" }, "optionalDependencies": { - "@swagger-api/apidom-error": "^0.83.0", - "@swagger-api/apidom-json-pointer": "^0.83.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.83.0", - "@swagger-api/apidom-ns-openapi-2": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.83.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.83.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.83.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.83.0", - "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-json": "^0.83.0", - "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.83.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.83.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.83.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.83.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.83.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.83.0" + "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-json-pointer": "^0.88.0", + "@swagger-api/apidom-ns-asyncapi-2": "^0.88.0", + "@swagger-api/apidom-ns-openapi-2": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", + "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.88.0", + "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.88.0", + "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-json": "^0.88.0", + "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.88.0", + "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.88.0", + "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.88.0", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.88.0", + 
"@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.88.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0" } }, "node_modules/@swagger-api/apidom-reference/node_modules/brace-expansion": { @@ -2229,11 +2229,11 @@ "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" }, "node_modules/@types/ramda": { - "version": "0.29.8", - "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.8.tgz", - "integrity": "sha512-CmEF76RSSj4NkgFnuQ4ZK3xeq8wMnE9zQH7sr54Yy/a61WbE1qIzWYVfd7XupLbTJY9jCjgEPbv6fqMlsW8Mvw==", + "version": "0.29.9", + "resolved": "https://registry.npmjs.org/@types/ramda/-/ramda-0.29.9.tgz", + "integrity": "sha512-X3yEG6tQCWBcUAql+RPC/O1Hm9BSU+MXu2wJnCETuAgUlrEDwTA1kIOdEEE4YXDtf0zfQLHa9CCE7WYp9kqPIQ==", "dependencies": { - "types-ramda": "^0.29.5" + "types-ramda": "^0.29.6" } }, "node_modules/@types/react": { @@ -2247,9 +2247,9 @@ } }, "node_modules/@types/react-dom": { - "version": "18.2.17", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.17.tgz", - "integrity": "sha512-rvrT/M7Df5eykWFxn6MYt5Pem/Dbyc1N8Y0S9Mrkw2WFCRiqUgw9P7ul2NpwsXCSM1DVdENzdG9J5SreqfAIWg==", + "version": "18.2.18", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.2.18.tgz", + "integrity": "sha512-TJxDm6OfAX2KJWJdMEVTwWke5Sc/E/RlnPGvGfS0W7+6ocy2xhDVQVh/KvC2Uf7kACs+gDytdusDSdWfWkaNzw==", "devOptional": true, "dependencies": { "@types/react": "*" @@ -2294,16 +2294,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.13.2.tgz", - "integrity": "sha512-3+9OGAWHhk4O1LlcwLBONbdXsAhLjyCFogJY/cWy2lxdVJ2JrcTF2pTGMaLl2AE7U1l31n8Py4a8bx5DLf/0dQ==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.14.0.tgz", + "integrity": "sha512-1ZJBykBCXaSHG94vMMKmiHoL0MhNHKSVlcHVYZNw+BKxufhqQVTOawNpwwI1P5nIFZ/4jLVop0mcY6mJJDFNaw==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.13.2", - "@typescript-eslint/type-utils": "6.13.2", - "@typescript-eslint/utils": "6.13.2", - "@typescript-eslint/visitor-keys": "6.13.2", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/type-utils": "6.14.0", + "@typescript-eslint/utils": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2356,7 +2356,7 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "node_modules/@typescript-eslint/scope-manager": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz", "integrity": "sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg==", @@ -2373,88 +2373,14 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", - "integrity": "sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - 
"url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz", - "integrity": "sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.14.0", - "@typescript-eslint/visitor-keys": "6.14.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", - "integrity": "sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.14.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.13.2.tgz", - "integrity": "sha512-CXQA0xo7z6x13FeDYCgBkjWzNqzBn8RXaE3QVQVIUm74fWJLkJkaHmHdKStrxQllGh6Q4eUGyNpMe0b1hMkXFA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.13.2", - "@typescript-eslint/visitor-keys": "6.13.2" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.13.2.tgz", - "integrity": "sha512-Qr6ssS1GFongzH2qfnWKkAQmMUyZSyOr0W54nZNU1MDfo+U4Mv3XveeLZzadc/yq8iYhQZHYT+eoXJqnACM1tw==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.14.0.tgz", + "integrity": "sha512-x6OC9Q7HfYKqjnuNu5a7kffIYs3No30isapRBJl1iCHLitD8O0lFbRcVGiOcuyN837fqXzPZ1NS10maQzZMKqw==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.13.2", - "@typescript-eslint/utils": "6.13.2", + "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/utils": "6.14.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2475,9 +2401,9 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.13.2.tgz", - "integrity": "sha512-7sxbQ+EMRubQc3wTfTsycgYpSujyVbI1xw+3UMRUcrhSy+pN09y/lWzeKDbvhoqcRbHdc+APLs/PWYi/cisLPg==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", + "integrity": "sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2488,13 +2414,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.13.2", - "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.13.2.tgz", - "integrity": "sha512-SuD8YLQv6WHnOEtKv8D6HZUzOub855cfPnPMKvdM/Bh1plv1f7Q/0iFUDLKKlxHcEstQnaUU4QZskgQq74t+3w==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz", + "integrity": "sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.13.2", - "@typescript-eslint/visitor-keys": "6.13.2", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/visitor-keys": "6.14.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2515,17 +2441,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.13.2.tgz", - "integrity": "sha512-b9Ptq4eAZUym4idijCRzl61oPCwwREcfDI8xGk751Vhzig5fFZR9CyzDz4Sp/nxSLBYxUPyh4QdIDqWykFhNmQ==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.14.0.tgz", + "integrity": "sha512-XwRTnbvRr7Ey9a1NT6jqdKX8y/atWG+8fAIu3z73HSP8h06i3r/ClMhmaF/RGWGW1tHJEwij1uEg2GbEmPYvYg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.13.2", - "@typescript-eslint/types": "6.13.2", - "@typescript-eslint/typescript-estree": "6.13.2", + "@typescript-eslint/scope-manager": "6.14.0", + "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/typescript-estree": "6.14.0", "semver": "^7.5.4" }, "engines": { @@ -2540,12 +2466,12 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.13.2", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.13.2.tgz", - "integrity": "sha512-OGznFs0eAQXJsp+xSd6k/O1UbFi/K/L7WjqeRoFE7vadjAF9y0uppXhYNQNEqygjou782maGClOoZwPqF0Drlw==", + "version": "6.14.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", + "integrity": "sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.13.2", + "@typescript-eslint/types": "6.14.0", "eslint-visitor-keys": "^3.4.1" }, "engines": { @@ -2686,9 +2612,9 @@ } }, "node_modules/axios": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.1.tgz", - "integrity": "sha512-vfBmhDpKafglh0EldBEbVuoe7DyAavGSLWhuSm5ZSEKQnHhBf0xAAwybbNH1IkrJNGnS/VG4I5yxig1pCEXE4g==", + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", + "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", "dependencies": { "follow-redirects": "^1.15.0", "form-data": "^4.0.0", @@ -2945,9 +2871,9 @@ "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", + "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", "engines": { "node": ">= 0.6" } @@ 
-2961,9 +2887,9 @@ } }, "node_modules/core-js-pure": { - "version": "3.33.2", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.33.2.tgz", - "integrity": "sha512-a8zeCdyVk7uF2elKIGz67AjcXOxjRbwOLz8SbklEso1V+2DoW4OkAMZN9S9GBgvZIaqQi/OemFX4OiSoQEmg1Q==", + "version": "3.34.0", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.34.0.tgz", + "integrity": "sha512-pmhivkYXkymswFfbXsANmBAewXx86UBfmagP+w0wkK06kLsLlTK5oQmsURPivzMkIBQiYq2cjamcZExIwlFQIg==", "hasInstallScript": true, "funding": { "type": "opencollective", @@ -3186,15 +3112,15 @@ } }, "node_modules/eslint": { - "version": "8.55.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.55.0.tgz", - "integrity": "sha512-iyUUAM0PCKj5QpwGfmCAG9XXbZCWsqP/eWAWrG/W0umvjuLRBECwSFdt+rCntju0xEH7teIABPwXpahftIaTdA==", + "version": "8.56.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", + "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.55.0", + "@eslint/js": "8.56.0", "@humanwhocodes/config-array": "^0.11.13", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -4430,9 +4356,9 @@ "dev": true }, "node_modules/node-abi": { - "version": "3.51.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.51.0.tgz", - "integrity": "sha512-SQkEP4hmNWjlniS5zdnfIXTk1x7Ome85RDzHlTbBtzE97Gfwz/Ipw4v/Ryk20DWIy3yCNVLVlGKApCnmvYoJbA==", + "version": "3.52.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.52.0.tgz", + "integrity": "sha512-JJ98b02z16ILv7859irtXn4oUaFWADtvkzy2c0IAatNVX2Mc9Yoh8z6hZInn3QwvMEYhHuQloYi+TTQy67SIdQ==", "optional": true, "dependencies": { "semver": "^7.3.5" @@ -5628,9 +5554,9 @@ } }, "node_modules/swagger-client": { - "version": "3.24.5", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.24.5.tgz", - "integrity": "sha512-qb4Rr9LpWs7o2AO4KdiIK+dz0GbrRLyD+UyN24h6AcNcDUnwfkb6LgFE4e6bXwVXWJzMp27w1QvSQ4hQNMPnoQ==", + "version": "3.24.6", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.24.6.tgz", + "integrity": "sha512-vgolnwLjsLCLe3mA9yOuXqmslVzxRpjz0fTBWwPtDGvYSU8FMVra0FGevw+N2OQ80UE1rOqgv4Te0AfvzMyR8g==", "dependencies": { "@babel/runtime-corejs3": "^7.22.15", "@swagger-api/apidom-core": ">=0.83.0 <1.0.0", @@ -5638,7 +5564,7 @@ "@swagger-api/apidom-json-pointer": ">=0.83.0 <1.0.0", "@swagger-api/apidom-ns-openapi-3-1": ">=0.83.0 <1.0.0", "@swagger-api/apidom-reference": ">=0.83.0 <1.0.0", - "cookie": "~0.5.0", + "cookie": "~0.6.0", "deepmerge": "~4.3.0", "fast-json-patch": "^3.0.0-1", "is-plain-object": "^5.0.0", @@ -5659,11 +5585,11 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.10.3", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.10.3.tgz", - "integrity": "sha512-AB/ko3xD76wyCFbfb5zihy8Gacg7Lz62umzcmBLC/+VN8twib4ayWNZ48lTRh6Kb9vitvEQCDM/4VS2uTwwy0w==", + "version": "5.10.5", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.10.5.tgz", + "integrity": "sha512-uBQLku4j3L1NC4/xE3HTgz1EcFisBphh8AnGqbj9LMxeGGcpKOlx/ZDigRAeVXWr9jOnZZbeGBzMe4NVHxPZrQ==", "dependencies": { - "@babel/runtime-corejs3": "^7.23.2", + "@babel/runtime-corejs3": "^7.23.5", "@braintree/sanitize-url": "=6.0.4", "base64-js": "^1.5.1", "classnames": "^2.3.1", @@ -5692,7 +5618,7 @@ "reselect": "^4.1.8", 
"serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.24.5", + "swagger-client": "^3.24.6", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", @@ -5861,9 +5787,9 @@ } }, "node_modules/types-ramda": { - "version": "0.29.5", - "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.5.tgz", - "integrity": "sha512-u+bAYXHDPJR+amB0qMrMU/NXRB2PG8QqpO2v6j7yK/0mPZhlaaZj++ynYjnVpkPEpCkZEGxNpWY3X7qyLCGE3w==", + "version": "0.29.6", + "resolved": "https://registry.npmjs.org/types-ramda/-/types-ramda-0.29.6.tgz", + "integrity": "sha512-VJoOk1uYNh9ZguGd3eZvqkdhD4hTGtnjRBUx5Zc0U9ftmnCgiWcSj/lsahzKunbiwRje1MxxNkEy1UdcXRCpYw==", "dependencies": { "ts-toolbelt": "^9.6.0" } @@ -5887,9 +5813,9 @@ "integrity": "sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==" }, "node_modules/undici": { - "version": "5.27.2", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.27.2.tgz", - "integrity": "sha512-iS857PdOEy/y3wlM3yRp+6SNQQ6xU0mmZcwRSriqk+et/cwWAtwmIGf6WkoDN2EK/AMdCO/dfXzIwi+rFMrjjQ==", + "version": "5.28.2", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz", + "integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==", "dependencies": { "@fastify/busboy": "^2.0.0" }, diff --git a/playground/package.json b/playground/package.json index 27d34ae436..24268398fc 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,16 +14,16 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.10.3" + "swagger-ui-react": "^5.10.5" }, "devDependencies": { "@types/react": "^18.2.45", - "@types/react-dom": "^18.2.17", + "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.13.2", + "@typescript-eslint/eslint-plugin": "^6.14.0", "@typescript-eslint/parser": "^6.14.0", "@vitejs/plugin-react-swc": "^3.5.0", - "eslint": "^8.55.0", + "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", From 16c34b09016e9076cee5fd84a50ac41732b6c43c Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Tue, 19 Dec 2023 12:26:02 +0100 Subject: [PATCH 26/60] feat: Add unique secondary index (#2131) ## Relevant issue(s) Resolves #298 ## Description Introduces unique secondary indexes. 
--- cli/index_create.go | 6 +- client/index.go | 2 + db/errors.go | 10 + db/fetcher/indexer.go | 18 +- db/fetcher/indexer_iterators.go | 172 +++++-- db/index.go | 222 ++++++--- db/index_test.go | 35 +- db/indexed_docs_test.go | 129 ++++- .../i2131-rename-schema-root.md | 3 + http/client_collection.go | 2 +- http/handler_collection.go | 8 +- request/graphql/schema/collection.go | 12 + .../{index_test.go => index_parse_test.go} | 77 ++- request/graphql/schema/types/types.go | 1 + tests/clients/cli/wrapper_collection.go | 7 +- tests/gen/cli/util_test.go | 6 +- tests/integration/index/create_drop_test.go | 20 +- tests/integration/index/create_get_test.go | 10 +- tests/integration/index/create_test.go | 38 +- tests/integration/index/create_unique_test.go | 188 +++++++ tests/integration/index/drop_test.go | 20 +- tests/integration/index/get_test.go | 6 +- ...uery_with_unique_index_only_filter_test.go | 463 ++++++++++++++++++ tests/integration/test_case.go | 3 + tests/integration/utils2.go | 1 + 25 files changed, 1270 insertions(+), 189 deletions(-) create mode 100644 docs/data_format_changes/i2131-rename-schema-root.md rename request/graphql/schema/{index_test.go => index_parse_test.go} (80%) create mode 100644 tests/integration/index/create_unique_test.go create mode 100644 tests/integration/index/query_with_unique_index_only_filter_test.go diff --git a/cli/index_create.go b/cli/index_create.go index 42866267fc..099eb7e7a6 100644 --- a/cli/index_create.go +++ b/cli/index_create.go @@ -21,12 +21,14 @@ func MakeIndexCreateCommand() *cobra.Command { var collectionArg string var nameArg string var fieldsArg []string + var uniqueArg bool var cmd = &cobra.Command{ - Use: "create -c --collection --fields [-n --name ]", + Use: "create -c --collection --fields [-n --name ] [--unique]", Short: "Creates a secondary index on a collection's field(s)", Long: `Creates a secondary index on a collection's field(s). The --name flag is optional. If not provided, a name will be generated automatically. +The --unique flag is optional. If provided, the index will be unique. Example: create an index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name @@ -44,6 +46,7 @@ Example: create a named index for 'Users' collection on 'name' field: desc := client.IndexDescription{ Name: nameArg, Fields: fields, + Unique: uniqueArg, } col, err := store.GetCollectionByName(cmd.Context(), collectionArg) if err != nil { @@ -62,6 +65,7 @@ Example: create a named index for 'Users' collection on 'name' field: cmd.Flags().StringVarP(&collectionArg, "collection", "c", "", "Collection name") cmd.Flags().StringVarP(&nameArg, "name", "n", "", "Index name") cmd.Flags().StringSliceVar(&fieldsArg, "fields", []string{}, "Fields to index") + cmd.Flags().BoolVarP(&uniqueArg, "unique", "u", false, "Make the index unique") return cmd } diff --git a/client/index.go b/client/index.go index 69f0362017..5e2d397394 100644 --- a/client/index.go +++ b/client/index.go @@ -36,6 +36,8 @@ type IndexDescription struct { ID uint32 // Fields contains the fields that are being indexed. Fields []IndexedFieldDescription + // Unique indicates whether the index is unique. + Unique bool } // CollectIndexedFields returns all fields that are indexed by all collection indexes. 
diff --git a/db/errors.go b/db/errors.go index 17e82c6738..1413c1289d 100644 --- a/db/errors.go +++ b/db/errors.go @@ -86,6 +86,7 @@ const ( errExpectedJSONArray string = "expected JSON array" errOneOneAlreadyLinked string = "target document is already linked to another document" errIndexDoesNotMatchName string = "the index used does not match the given name" + errCanNotIndexNonUniqueField string = "can not create doc that violates unique index" ) var ( @@ -631,3 +632,12 @@ func NewErrIndexDoesNotMatchName(index, name string) error { errors.NewKV("Name", name), ) } + +func NewErrCanNotIndexNonUniqueField(dockey, fieldName string, value any) error { + return errors.New( + errCanNotIndexNonUniqueField, + errors.NewKV("Dockey", dockey), + errors.NewKV("Field name", fieldName), + errors.NewKV("Field value", value), + ) +} diff --git a/db/fetcher/indexer.go b/db/fetcher/indexer.go index a0ee94d0b9..6b4833d00f 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -32,6 +32,7 @@ type IndexFetcher struct { mapping *core.DocumentMapping indexedField client.FieldDescription docFields []client.FieldDescription + indexDesc client.IndexDescription indexIter indexIterator indexDataStoreKey core.IndexDataStoreKey execInfo ExecInfo @@ -70,6 +71,7 @@ func (f *IndexFetcher) Init( for _, index := range col.Description().Indexes { if index.Fields[0].Name == f.indexedField.Name { + f.indexDesc = index f.indexDataStoreKey.IndexID = index.ID break } @@ -84,7 +86,7 @@ func (f *IndexFetcher) Init( } } - iter, err := createIndexIterator(f.indexDataStoreKey, f.indexFilter, &f.execInfo) + iter, err := createIndexIterator(f.indexDataStoreKey, f.indexFilter, &f.execInfo, f.indexDesc.Unique) if err != nil { return err } @@ -112,28 +114,32 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo for { f.doc.Reset() - indexKey, hasValue, err := f.indexIter.Next() + res, err := f.indexIter.Next() if err != nil { return nil, ExecInfo{}, err } - if !hasValue { + if !res.foundKey { return nil, f.execInfo, nil } property := &encProperty{ Desc: f.indexedField, - Raw: indexKey.FieldValues[0], + Raw: res.key.FieldValues[0], } - f.doc.key = indexKey.FieldValues[1] + if f.indexDesc.Unique { + f.doc.key = res.value + } else { + f.doc.key = res.key.FieldValues[1] + } f.doc.properties[f.indexedField] = property f.execInfo.FieldsFetched++ if f.docFetcher != nil && len(f.docFields) > 0 { targetKey := base.MakeDocKey(f.col.Description(), string(f.doc.key)) spans := core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd())) - err = f.docFetcher.Start(ctx, spans) + err := f.docFetcher.Start(ctx, spans) if err != nil { return nil, ExecInfo{}, err } diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index b563c9b3a3..3b2bd1f996 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ -45,45 +45,51 @@ const ( // For example, iteration over condition _eq and _gt will have completely different logic. 
type indexIterator interface { Init(context.Context, datastore.DSReaderWriter) error - Next() (core.IndexDataStoreKey, bool, error) + Next() (indexIterResult, error) Close() error } +type indexIterResult struct { + key core.IndexDataStoreKey + foundKey bool + value []byte +} + type queryResultIterator struct { resultIter query.Results } -func (i queryResultIterator) Next() (core.IndexDataStoreKey, bool, error) { +func (i *queryResultIterator) Next() (indexIterResult, error) { res, hasVal := i.resultIter.NextSync() if res.Error != nil { - return core.IndexDataStoreKey{}, false, res.Error + return indexIterResult{}, res.Error } if !hasVal { - return core.IndexDataStoreKey{}, false, nil + return indexIterResult{}, nil } key, err := core.NewIndexDataStoreKey(res.Key) if err != nil { - return core.IndexDataStoreKey{}, false, err + return indexIterResult{}, err } - return key, true, nil + return indexIterResult{key: key, value: res.Value, foundKey: true}, nil } -func (i queryResultIterator) Close() error { +func (i *queryResultIterator) Close() error { return i.resultIter.Close() } -type eqIndexIterator struct { +type eqPrefixIndexIterator struct { + filterValueHolder + indexKey core.IndexDataStoreKey + execInfo *ExecInfo + queryResultIterator - indexKey core.IndexDataStoreKey - filterVal []byte - execInfo *ExecInfo } -func (i *eqIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { - i.indexKey.FieldValues = [][]byte{i.filterVal} +func (i *eqPrefixIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { + i.indexKey.FieldValues = [][]byte{i.value} resultIter, err := store.Query(ctx, query.Query{ - Prefix: i.indexKey.ToString(), - KeysOnly: true, + Prefix: i.indexKey.ToString(), }) if err != nil { return err @@ -92,16 +98,62 @@ func (i *eqIndexIterator) Init(ctx context.Context, store datastore.DSReaderWrit return nil } -func (i *eqIndexIterator) Next() (core.IndexDataStoreKey, bool, error) { - key, hasValue, err := i.queryResultIterator.Next() - if hasValue { +func (i *eqPrefixIndexIterator) Next() (indexIterResult, error) { + res, err := i.queryResultIterator.Next() + if res.foundKey { i.execInfo.IndexesFetched++ } - return key, hasValue, err + return res, err +} + +type filterValueIndexIterator interface { + indexIterator + SetFilterValue([]byte) +} + +type filterValueHolder struct { + value []byte +} + +func (h *filterValueHolder) SetFilterValue(value []byte) { + h.value = value +} + +type eqSingleIndexIterator struct { + filterValueHolder + indexKey core.IndexDataStoreKey + execInfo *ExecInfo + + ctx context.Context + store datastore.DSReaderWriter +} + +func (i *eqSingleIndexIterator) Init(ctx context.Context, store datastore.DSReaderWriter) error { + i.ctx = ctx + i.store = store + return nil +} + +func (i *eqSingleIndexIterator) Next() (indexIterResult, error) { + if i.store == nil { + return indexIterResult{}, nil + } + i.indexKey.FieldValues = [][]byte{i.value} + val, err := i.store.Get(i.ctx, i.indexKey.ToDS()) + if err != nil { + return indexIterResult{}, err + } + i.store = nil + i.execInfo.IndexesFetched++ + return indexIterResult{key: i.indexKey, value: val, foundKey: true}, nil +} + +func (i *eqSingleIndexIterator) Close() error { + return nil } type inIndexIterator struct { - eqIndexIterator + filterValueIndexIterator filterValues [][]byte nextValIndex int ctx context.Context @@ -110,22 +162,18 @@ type inIndexIterator struct { } func newInIndexIterator( - indexKey core.IndexDataStoreKey, + indexIter filterValueIndexIterator, 
filterValues [][]byte, - execInfo *ExecInfo, ) *inIndexIterator { return &inIndexIterator{ - eqIndexIterator: eqIndexIterator{ - indexKey: indexKey, - execInfo: execInfo, - }, - filterValues: filterValues, + filterValueIndexIterator: indexIter, + filterValues: filterValues, } } func (i *inIndexIterator) nextIterator() (bool, error) { if i.nextValIndex > 0 { - err := i.eqIndexIterator.Close() + err := i.filterValueIndexIterator.Close() if err != nil { return false, err } @@ -135,8 +183,8 @@ func (i *inIndexIterator) nextIterator() (bool, error) { return false, nil } - i.filterVal = i.filterValues[i.nextValIndex] - err := i.eqIndexIterator.Init(i.ctx, i.store) + i.SetFilterValue(i.filterValues[i.nextValIndex]) + err := i.filterValueIndexIterator.Init(i.ctx, i.store) if err != nil { return false, err } @@ -152,22 +200,22 @@ func (i *inIndexIterator) Init(ctx context.Context, store datastore.DSReaderWrit return err } -func (i *inIndexIterator) Next() (core.IndexDataStoreKey, bool, error) { +func (i *inIndexIterator) Next() (indexIterResult, error) { for i.hasIterator { - key, hasValue, err := i.eqIndexIterator.Next() + res, err := i.filterValueIndexIterator.Next() if err != nil { - return core.IndexDataStoreKey{}, false, err + return indexIterResult{}, err } - if !hasValue { + if !res.foundKey { i.hasIterator, err = i.nextIterator() if err != nil { - return core.IndexDataStoreKey{}, false, err + return indexIterResult{}, err } continue } - return key, true, nil + return res, nil } - return core.IndexDataStoreKey{}, false, nil + return indexIterResult{}, nil } func (i *inIndexIterator) Close() error { @@ -220,9 +268,8 @@ func (i *scanningIndexIterator) Init(ctx context.Context, store datastore.DSRead i.filter.matcher = &execInfoIndexMatcherDecorator{matcher: i.matcher, execInfo: i.execInfo} iter, err := store.Query(ctx, query.Query{ - Prefix: i.indexKey.ToString(), - KeysOnly: true, - Filters: []query.Filter{&i.filter}, + Prefix: i.indexKey.ToString(), + Filters: []query.Filter{&i.filter}, }) if err != nil { return err @@ -232,12 +279,12 @@ func (i *scanningIndexIterator) Init(ctx context.Context, store datastore.DSRead return nil } -func (i *scanningIndexIterator) Next() (core.IndexDataStoreKey, bool, error) { - key, hasValue, err := i.queryResultIterator.Next() +func (i *scanningIndexIterator) Next() (indexIterResult, error) { + res, err := i.queryResultIterator.Next() if i.filter.err != nil { - return core.IndexDataStoreKey{}, false, i.filter.err + return indexIterResult{}, i.filter.err } - return key, hasValue, err + return res, err } // checks if the stored index value satisfies the condition @@ -348,6 +395,7 @@ func createIndexIterator( indexDataStoreKey core.IndexDataStoreKey, indexFilterConditions *mapper.Filter, execInfo *ExecInfo, + isUnique bool, ) (indexIterator, error) { var op string var filterVal any @@ -373,11 +421,23 @@ func createIndexIterator( switch op { case opEq: - return &eqIndexIterator{ - indexKey: indexDataStoreKey, - filterVal: valueBytes, - execInfo: execInfo, - }, nil + if isUnique { + return &eqSingleIndexIterator{ + indexKey: indexDataStoreKey, + filterValueHolder: filterValueHolder{ + value: valueBytes, + }, + execInfo: execInfo, + }, nil + } else { + return &eqPrefixIndexIterator{ + indexKey: indexDataStoreKey, + filterValueHolder: filterValueHolder{ + value: valueBytes, + }, + execInfo: execInfo, + }, nil + } case opGt: return &scanningIndexIterator{ indexKey: indexDataStoreKey, @@ -438,7 +498,19 @@ func createIndexIterator( valArr = append(valArr, valueBytes) 
} if op == opIn { - return newInIndexIterator(indexDataStoreKey, valArr, execInfo), nil + var iter filterValueIndexIterator + if isUnique { + iter = &eqSingleIndexIterator{ + indexKey: indexDataStoreKey, + execInfo: execInfo, + } + } else { + iter = &eqPrefixIndexIterator{ + indexKey: indexDataStoreKey, + execInfo: execInfo, + } + } + return newInIndexIterator(iter, valArr), nil } else { return &scanningIndexIterator{ indexKey: indexDataStoreKey, diff --git a/db/index.go b/db/index.go index 5d43bddb21..804eac492e 100644 --- a/db/index.go +++ b/db/index.go @@ -90,44 +90,32 @@ func NewCollectionIndex( if len(desc.Fields) == 0 { return nil, NewErrIndexDescHasNoFields(desc) } - index := &collectionSimpleIndex{collection: collection, desc: desc} field, foundField := collection.Schema().GetField(desc.Fields[0].Name) if !foundField { return nil, NewErrIndexDescHasNonExistingField(desc, desc.Fields[0].Name) } - var e error - index.fieldDesc = field - index.validateFieldFunc, e = getFieldValidateFunc(field.Kind) - return index, e + base := collectionBaseIndex{collection: collection, desc: desc} + base.fieldDesc = field + var err error + base.validateFieldFunc, err = getFieldValidateFunc(field.Kind) + if err != nil { + return nil, err + } + if desc.Unique { + return &collectionUniqueIndex{collectionBaseIndex: base}, nil + } else { + return &collectionSimpleIndex{collectionBaseIndex: base}, nil + } } -// collectionSimpleIndex is an non-unique index that indexes documents by a single field. -// Single-field indexes store values only in ascending order. -type collectionSimpleIndex struct { +type collectionBaseIndex struct { collection client.Collection desc client.IndexDescription validateFieldFunc func(any) bool fieldDesc client.FieldDescription } -var _ CollectionIndex = (*collectionSimpleIndex)(nil) - -func (i *collectionSimpleIndex) getDocumentsIndexKey( - doc *client.Document, -) (core.IndexDataStoreKey, error) { - fieldValue, err := i.getDocFieldValue(doc) - if err != nil { - return core.IndexDataStoreKey{}, err - } - - indexDataStoreKey := core.IndexDataStoreKey{} - indexDataStoreKey.CollectionID = i.collection.ID() - indexDataStoreKey.IndexID = i.desc.ID - indexDataStoreKey.FieldValues = [][]byte{fieldValue, []byte(doc.Key().String())} - return indexDataStoreKey, nil -} - -func (i *collectionSimpleIndex) getDocFieldValue(doc *client.Document) ([]byte, error) { +func (i *collectionBaseIndex) getDocFieldValue(doc *client.Document) ([]byte, error) { // collectionSimpleIndex only supports single field indexes, that's why we // can safely access the first field indexedFieldName := i.desc.Fields[0].Name @@ -146,35 +134,26 @@ func (i *collectionSimpleIndex) getDocFieldValue(doc *client.Document) ([]byte, return writeableVal.Bytes() } -// Save indexes a document by storing the indexed field value. 
-func (i *collectionSimpleIndex) Save( - ctx context.Context, - txn datastore.Txn, +func (i *collectionBaseIndex) getDocumentsIndexKey( doc *client.Document, -) error { - key, err := i.getDocumentsIndexKey(doc) - if err != nil { - return err - } - err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) +) (core.IndexDataStoreKey, error) { + fieldValue, err := i.getDocFieldValue(doc) if err != nil { - return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) + return core.IndexDataStoreKey{}, err } - return nil + + indexDataStoreKey := core.IndexDataStoreKey{} + indexDataStoreKey.CollectionID = i.collection.ID() + indexDataStoreKey.IndexID = i.desc.ID + indexDataStoreKey.FieldValues = [][]byte{fieldValue} + return indexDataStoreKey, nil } -// Update updates indexed field values of an existing document. -// It removes the old document from the index and adds the new one. -func (i *collectionSimpleIndex) Update( +func (i *collectionBaseIndex) deleteIndexKey( ctx context.Context, txn datastore.Txn, - oldDoc *client.Document, - newDoc *client.Document, + key core.IndexDataStoreKey, ) error { - key, err := i.getDocumentsIndexKey(oldDoc) - if err != nil { - return err - } exists, err := txn.Datastore().Has(ctx, key.ToDS()) if err != nil { return err @@ -182,16 +161,12 @@ func (i *collectionSimpleIndex) Update( if !exists { return NewErrCorruptedIndex(i.desc.Name) } - err = txn.Datastore().Delete(ctx, key.ToDS()) - if err != nil { - return err - } - return i.Save(ctx, txn, newDoc) + return txn.Datastore().Delete(ctx, key.ToDS()) } // RemoveAll remove all artifacts of the index from the storage, i.e. all index // field values for all documents. -func (i *collectionSimpleIndex) RemoveAll(ctx context.Context, txn datastore.Txn) error { +func (i *collectionBaseIndex) RemoveAll(ctx context.Context, txn datastore.Txn) error { prefixKey := core.IndexDataStoreKey{} prefixKey.CollectionID = i.collection.ID() prefixKey.IndexID = i.desc.ID @@ -212,11 +187,148 @@ func (i *collectionSimpleIndex) RemoveAll(ctx context.Context, txn datastore.Txn } // Name returns the name of the index -func (i *collectionSimpleIndex) Name() string { +func (i *collectionBaseIndex) Name() string { return i.desc.Name } // Description returns the description of the index -func (i *collectionSimpleIndex) Description() client.IndexDescription { +func (i *collectionBaseIndex) Description() client.IndexDescription { return i.desc } + +// collectionSimpleIndex is an non-unique index that indexes documents by a single field. +// Single-field indexes store values only in ascending order. +type collectionSimpleIndex struct { + collectionBaseIndex +} + +var _ CollectionIndex = (*collectionSimpleIndex)(nil) + +func (i *collectionSimpleIndex) getDocumentsIndexKey( + doc *client.Document, +) (core.IndexDataStoreKey, error) { + key, err := i.collectionBaseIndex.getDocumentsIndexKey(doc) + if err != nil { + return core.IndexDataStoreKey{}, err + } + + key.FieldValues = append(key.FieldValues, []byte(doc.Key().String())) + return key, nil +} + +// Save indexes a document by storing the indexed field value. 
+func (i *collectionSimpleIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + err = txn.Datastore().Put(ctx, key.ToDS(), []byte{}) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) + } + return nil +} + +func (i *collectionSimpleIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + err := i.deleteDocIndex(ctx, txn, oldDoc) + if err != nil { + return err + } + return i.Save(ctx, txn, newDoc) +} + +func (i *collectionSimpleIndex) deleteDocIndex( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + return i.deleteIndexKey(ctx, txn, key) +} + +type collectionUniqueIndex struct { + collectionBaseIndex +} + +var _ CollectionIndex = (*collectionUniqueIndex)(nil) + +func (i *collectionUniqueIndex) Save( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + exists, err := txn.Datastore().Has(ctx, key.ToDS()) + if err != nil { + return err + } + if exists { + return i.newUniqueIndexError(doc) + } + err = txn.Datastore().Put(ctx, key.ToDS(), []byte(doc.Key().String())) + if err != nil { + return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) + } + return nil +} + +func (i *collectionUniqueIndex) newUniqueIndexError( + doc *client.Document, +) error { + fieldVal, err := doc.GetValue(i.fieldDesc.Name) + if err != nil { + return err + } + return NewErrCanNotIndexNonUniqueField(doc.Key().String(), i.fieldDesc.Name, fieldVal.Value()) +} + +func (i *collectionUniqueIndex) Update( + ctx context.Context, + txn datastore.Txn, + oldDoc *client.Document, + newDoc *client.Document, +) error { + newKey, err := i.getDocumentsIndexKey(newDoc) + if err != nil { + return err + } + exists, err := txn.Datastore().Has(ctx, newKey.ToDS()) + if err != nil { + return err + } + if exists { + return i.newUniqueIndexError(newDoc) + } + err = i.deleteDocIndex(ctx, txn, oldDoc) + if err != nil { + return err + } + return i.Save(ctx, txn, newDoc) +} + +func (i *collectionUniqueIndex) deleteDocIndex( + ctx context.Context, + txn datastore.Txn, + doc *client.Document, +) error { + key, err := i.getDocumentsIndexKey(doc) + if err != nil { + return err + } + return i.deleteIndexKey(ctx, txn, key) +} diff --git a/db/index_test.go b/db/index_test.go index e85fd9bfb4..911228e649 100644 --- a/db/index_test.go +++ b/db/index_test.go @@ -58,7 +58,7 @@ type indexTestFixture struct { t *testing.T } -func (f *indexTestFixture) getUsersCollectionDesc() client.Collection { +func (f *indexTestFixture) addUsersCollection() client.Collection { _, err := f.db.AddSchema( f.ctx, fmt.Sprintf( @@ -129,7 +129,7 @@ func newIndexTestFixtureBare(t *testing.T) *indexTestFixture { func newIndexTestFixture(t *testing.T) *indexTestFixture { f := newIndexTestFixtureBare(t) - f.users = f.getUsersCollectionDesc() + f.users = f.addUsersCollection() return f } @@ -178,14 +178,24 @@ func getProductsIndexDescOnCategory() client.IndexDescription { func (f *indexTestFixture) createUserCollectionIndexOnName() client.IndexDescription { newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnName()) require.NoError(f.t, err) - f.commitTxn() + return newDesc +} + +func makeUnique(indexDesc 
client.IndexDescription) client.IndexDescription { + indexDesc.Unique = true + return indexDesc +} + +func (f *indexTestFixture) createUserCollectionUniqueIndexOnName() client.IndexDescription { + indexDesc := makeUnique(getUsersIndexDescOnName()) + newDesc, err := f.createCollectionIndexFor(f.users.Name(), indexDesc) + require.NoError(f.t, err) return newDesc } func (f *indexTestFixture) createUserCollectionIndexOnAge() client.IndexDescription { newDesc, err := f.createCollectionIndexFor(f.users.Name(), getUsersIndexDescOnAge()) require.NoError(f.t, err) - f.commitTxn() return newDesc } @@ -226,7 +236,11 @@ func (f *indexTestFixture) createCollectionIndexFor( collectionName string, desc client.IndexDescription, ) (client.IndexDescription, error) { - return f.db.createCollectionIndex(f.ctx, f.txn, collectionName, desc) + index, err := f.db.createCollectionIndex(f.ctx, f.txn, collectionName, desc) + if err == nil { + f.commitTxn() + } + return index, err } func (f *indexTestFixture) getAllIndexes() (map[client.CollectionName][]client.IndexDescription, error) { @@ -278,6 +292,7 @@ func TestCreateIndex_IfValidInput_CreateIndex(t *testing.T) { assert.NoError(t, err) assert.Equal(t, desc.Name, resultDesc.Name) assert.Equal(t, desc.Fields, resultDesc.Fields) + assert.Equal(t, desc.Unique, resultDesc.Unique) } func TestCreateIndex_IfFieldNameIsEmpty_ReturnError(t *testing.T) { @@ -414,7 +429,7 @@ func TestCreateIndex_IfPropertyDoesntExist_ReturnError(t *testing.T) { func TestCreateIndex_WithMultipleCollectionsAndIndexes_AssignIncrementedIDPerCollection(t *testing.T) { f := newIndexTestFixtureBare(t) - users := f.getUsersCollectionDesc() + users := f.addUsersCollection() products := f.getProductsCollectionDesc() makeIndex := func(fieldName string) client.IndexDescription { @@ -511,7 +526,6 @@ func TestCreateIndex_IfAttemptToIndexOnUnsupportedType_ReturnError(t *testing.T) _, err = f.createCollectionIndexFor(collection.Name(), indexDesc) require.ErrorIs(f.t, err, NewErrUnsupportedIndexFieldType(unsupportedKind)) - f.commitTxn() } func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { @@ -525,8 +539,6 @@ func TestGetIndexes_ShouldReturnListOfAllExistingIndexes(t *testing.T) { _, err := f.createCollectionIndexFor(usersColName, usersIndexDesc) assert.NoError(t, err) - f.commitTxn() - f.getProductsCollectionDesc() productsIndexDesc := client.IndexDescription{ Name: "products_description_index", @@ -651,8 +663,6 @@ func TestGetCollectionIndexes_ShouldReturnListOfCollectionIndexes(t *testing.T) _, err := f.createCollectionIndexFor(usersColName, usersIndexDesc) assert.NoError(t, err) - f.commitTxn() - f.getProductsCollectionDesc() productsIndexDesc := client.IndexDescription{ Name: "products_description_index", @@ -856,7 +866,7 @@ func TestCollectionGetIndexes_IfFailsToCreateTxn_ShouldNotCache(t *testing.T) { func TestCollectionGetIndexes_IfStoredIndexWithUnsupportedType_ReturnError(t *testing.T) { f := newIndexTestFixtureBare(t) - f.getUsersCollectionDesc() + f.addUsersCollection() const unsupportedKind = client.FieldKind_BOOL_ARRAY _, err := f.db.AddSchema( @@ -1004,7 +1014,6 @@ func TestCollectionGetIndexes_ShouldReturnIndexesInOrderedByName(t *testing.T) { _, err := f.createCollectionIndexFor(collection.Name(), indexDesc) require.NoError(t, err) } - f.commitTxn() indexes, err := collection.GetIndexes(f.ctx) require.NoError(t, err) diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index bb569bdc6c..4110463c09 100644 --- a/db/indexed_docs_test.go +++ 
b/db/indexed_docs_test.go @@ -50,6 +50,7 @@ type productDoc struct { func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client.Collection) { err := col.Create(f.ctx, doc) require.NoError(f.t, err) + f.commitTxn() f.txn, err = f.db.NewTxn(f.ctx, false) require.NoError(f.t, err) } @@ -173,7 +174,10 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey { fieldBytesVal, err = writeableVal.Bytes() require.NoError(b.f.t, err) - key.FieldValues = [][]byte{fieldBytesVal, []byte(b.doc.Key().String())} + key.FieldValues = [][]byte{fieldBytesVal} + if !b.isUnique { + key.FieldValues = append(key.FieldValues, []byte(b.doc.Key().String())) + } } else if len(b.values) > 0 { key.FieldValues = b.values } @@ -354,7 +358,7 @@ func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) { func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t *testing.T) { f := newIndexTestFixtureBare(t) - users := f.getUsersCollectionDesc() + users := f.addUsersCollection() products := f.getProductsCollectionDesc() _, err := f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnName()) @@ -623,15 +627,16 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) invalidKeyString := fieldKeyString + "/doesn't matter/" // Insert an invalid key within the document prefix, this will generate an error within the fetcher. - f.db.multistore.Datastore().Put(f.ctx, ipfsDatastore.NewKey(invalidKeyString), []byte("doesn't matter")) + err := f.db.multistore.Datastore().Put(f.ctx, ipfsDatastore.NewKey(invalidKeyString), []byte("doesn't matter")) + require.NoError(f.t, err) - _, err := f.users.CreateIndex(f.ctx, getUsersIndexDescOnName()) + _, err = f.users.CreateIndex(f.ctx, getUsersIndexDescOnName()) require.ErrorIs(f.t, err, core.ErrInvalidKey) } func TestNonUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { f := newIndexTestFixtureBare(t) - users := f.getUsersCollectionDesc() + users := f.addUsersCollection() _, err := f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnName()) require.NoError(f.t, err) _, err = f.createCollectionIndexFor(users.Name(), getUsersIndexDescOnAge()) @@ -1004,3 +1009,117 @@ func (encdoc *shimEncodedDocument) Reset() { encdoc.status = 0 encdoc.properties = map[client.FieldDescription]any{} } + +func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + + doc1 := f.newUserDoc("John", 21) + f.saveDocToCollection(doc1, f.users) + doc2 := f.newUserDoc("Islam", 18) + f.saveDocToCollection(doc2, f.users) + + f.createUserCollectionUniqueIndexOnName() + + key1 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc1).Build() + key2 := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc2).Build() + + data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS()) + require.NoError(t, err, key1.ToString()) + assert.Equal(t, data, []byte(doc1.Key().String())) + data, err = f.txn.Datastore().Get(f.ctx, key2.ToDS()) + require.NoError(t, err) + assert.Equal(t, data, []byte(doc2.Key().String())) +} + +func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { + f := newIndexTestFixture(t) + defer f.db.Close() + f.createUserCollectionUniqueIndexOnName() + + docJSON, err := json.Marshal(struct { + Age int `json:"age"` + }{Age: 44}) + require.NoError(f.t, err) + + doc, err := client.NewDocFromJSON(docJSON) + require.NoError(f.t, err) + + f.saveDocToCollection(doc, f.users) + + key := 
newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).
+		Values([]byte(nil)).Build()
+
+	data, err := f.txn.Datastore().Get(f.ctx, key.ToDS())
+	require.NoError(t, err)
+	assert.Equal(t, data, []byte(doc.Key().String()))
+}
+
+func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) {
+	f := newIndexTestFixtureBare(t)
+	users := f.addUsersCollection()
+	_, err := f.createCollectionIndexFor(users.Name(), makeUnique(getUsersIndexDescOnName()))
+	require.NoError(f.t, err)
+	_, err = f.createCollectionIndexFor(users.Name(), makeUnique(getUsersIndexDescOnAge()))
+	require.NoError(f.t, err)
+	f.commitTxn()
+
+	f.saveDocToCollection(f.newUserDoc("John", 21), users)
+	f.saveDocToCollection(f.newUserDoc("Islam", 23), users)
+
+	userNameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Build()
+	userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Build()
+
+	err = f.dropIndex(usersColName, testUsersColIndexAge)
+	require.NoError(f.t, err)
+
+	assert.Len(t, f.getPrefixFromDataStore(userNameKey.ToString()), 2)
+	assert.Len(t, f.getPrefixFromDataStore(userAgeKey.ToString()), 0)
+}
+
+func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) {
+	f := newIndexTestFixture(t)
+	defer f.db.Close()
+	f.createUserCollectionUniqueIndexOnName()
+
+	cases := []struct {
+		Name     string
+		NewValue string
+		Exec     func(doc *client.Document) error
+	}{
+		{
+			Name:     "update",
+			NewValue: "Islam",
+			Exec: func(doc *client.Document) error {
+				return f.users.Update(f.ctx, doc)
+			},
+		},
+		{
+			Name:     "save",
+			NewValue: "Andy",
+			Exec: func(doc *client.Document) error {
+				return f.users.Save(f.ctx, doc)
+			},
+		},
+	}
+
+	doc := f.newUserDoc("John", 21)
+	f.saveDocToCollection(doc, f.users)
+
+	for _, tc := range cases {
+		oldKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).Build()
+
+		err := doc.Set(usersNameFieldName, tc.NewValue)
+		require.NoError(t, err)
+		err = tc.Exec(doc)
+		require.NoError(t, err)
+		f.commitTxn()
+
+		newKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Unique().Doc(doc).Build()
+
+		_, err = f.txn.Datastore().Get(f.ctx, oldKey.ToDS())
+		require.Error(t, err)
+		_, err = f.txn.Datastore().Get(f.ctx, newKey.ToDS())
+		require.NoError(t, err)
+	}
+}
diff --git a/docs/data_format_changes/i2131-rename-schema-root.md b/docs/data_format_changes/i2131-rename-schema-root.md
new file mode 100644
index 0000000000..f71258c0d8
--- /dev/null
+++ b/docs/data_format_changes/i2131-rename-schema-root.md
@@ -0,0 +1,3 @@
+# Changed some tests so that they are consistent with others
+
+Changed collection names from "Users" to "User" and made all fields start with lowercase letters.
\ No newline at end of file diff --git a/http/client_collection.go b/http/client_collection.go index 9f56594db7..35ca21ce4f 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -424,5 +424,5 @@ func (c *Collection) GetIndexes(ctx context.Context) ([]client.IndexDescription, if err := c.http.requestJson(req, &indexes); err != nil { return nil, err } - return c.Description().Indexes, nil + return indexes, nil } diff --git a/http/handler_collection.go b/http/handler_collection.go index a5622f1336..69f08d7073 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -312,13 +312,17 @@ func (s *collectionHandler) CreateIndex(rw http.ResponseWriter, req *http.Reques } func (s *collectionHandler) GetIndexes(rw http.ResponseWriter, req *http.Request) { - col := req.Context().Value(colContextKey).(client.Collection) + store := req.Context().Value(storeContextKey).(client.Store) + indexesMap, err := store.GetAllIndexes(req.Context()) - indexes, err := col.GetIndexes(req.Context()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } + indexes := make([]client.IndexDescription, 0, len(indexesMap)) + for _, index := range indexesMap { + indexes = append(indexes, index...) + } responseJSON(rw, http.StatusOK, indexes) } diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index ed77a9d614..fd4c354a32 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -186,6 +186,12 @@ func fieldIndexFromAST(field *ast.FieldDefinition, directive *ast.Directive) (cl if !IsValidIndexName(desc.Name) { return client.IndexDescription{}, NewErrIndexWithInvalidName(desc.Name) } + case types.IndexDirectivePropUnique: + boolVal, ok := arg.Value.(*ast.BooleanValue) + if !ok { + return client.IndexDescription{}, ErrIndexWithInvalidArg + } + desc.Unique = boolVal.Value default: return client.IndexDescription{}, ErrIndexWithUnknownArg } @@ -227,6 +233,12 @@ func indexFromAST(directive *ast.Directive) (client.IndexDescription, error) { if !ok { return client.IndexDescription{}, ErrIndexWithInvalidArg } + case types.IndexDirectivePropUnique: + boolVal, ok := arg.Value.(*ast.BooleanValue) + if !ok { + return client.IndexDescription{}, ErrIndexWithInvalidArg + } + desc.Unique = boolVal.Value default: return client.IndexDescription{}, ErrIndexWithUnknownArg } diff --git a/request/graphql/schema/index_test.go b/request/graphql/schema/index_parse_test.go similarity index 80% rename from request/graphql/schema/index_test.go rename to request/graphql/schema/index_parse_test.go index 155a17fbf6..ca1ce32696 100644 --- a/request/graphql/schema/index_test.go +++ b/request/graphql/schema/index_parse_test.go @@ -19,7 +19,7 @@ import ( "github.com/sourcenetwork/defradb/client" ) -func TestStructIndex(t *testing.T) { +func TestParseIndexOnStruct(t *testing.T) { cases := []indexTestCase{ { description: "Index with a single field", @@ -30,6 +30,7 @@ func TestStructIndex(t *testing.T) { Fields: []client.IndexedFieldDescription{ {Name: "name", Direction: client.Ascending}, }, + Unique: false, }, }, }, @@ -45,6 +46,30 @@ func TestStructIndex(t *testing.T) { }, }, }, + { + description: "Unique index", + sdl: `type user @index(fields: ["name"], unique: true) {}`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: true, + }, + }, + }, + { + description: "Index explicitly not unique", + sdl: `type user 
@index(fields: ["name"], unique: false) {}`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: false, + }, + }, + }, { description: "Index with explicit ascending field", sdl: `type user @index(fields: ["name"], directions: [ASC]) {}`, @@ -96,11 +121,11 @@ func TestStructIndex(t *testing.T) { } } -func TestInvalidStructIndex(t *testing.T) { +func TestParseInvalidIndexOnStruct(t *testing.T) { cases := []invalidIndexTestCase{ { description: "missing 'fields' argument", - sdl: `type user @index(name: "userIndex") {}`, + sdl: `type user @index(name: "userIndex", unique: true) {}`, expectedErr: errIndexMissingFields, }, { @@ -133,6 +158,11 @@ func TestInvalidStructIndex(t *testing.T) { sdl: `type user @index(name: "user!name", fields: ["name"]) {}`, expectedErr: errIndexInvalidArgument, }, + { + description: "invalid 'unique' value type", + sdl: `type user @index(fields: ["name"], unique: "true") {}`, + expectedErr: errIndexInvalidArgument, + }, { description: "invalid 'fields' value type (not a list)", sdl: `type user @index(fields: "name") {}`, @@ -175,7 +205,7 @@ func TestInvalidStructIndex(t *testing.T) { } } -func TestFieldIndex(t *testing.T) { +func TestParseIndexOnField(t *testing.T) { cases := []indexTestCase{ { description: "field index", @@ -188,6 +218,7 @@ func TestFieldIndex(t *testing.T) { Fields: []client.IndexedFieldDescription{ {Name: "name", Direction: client.Ascending}, }, + Unique: false, }, }, }, @@ -202,6 +233,35 @@ func TestFieldIndex(t *testing.T) { Fields: []client.IndexedFieldDescription{ {Name: "name", Direction: client.Ascending}, }, + Unique: false, + }, + }, + }, + { + description: "unique field index", + sdl: `type user { + name: String @index(unique: true) + }`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: true, + }, + }, + }, + { + description: "field index explicitly not unique", + sdl: `type user { + name: String @index(unique: false) + }`, + targetDescriptions: []client.IndexDescription{ + { + Fields: []client.IndexedFieldDescription{ + {Name: "name", Direction: client.Ascending}, + }, + Unique: false, }, }, }, @@ -212,7 +272,7 @@ func TestFieldIndex(t *testing.T) { } } -func TestInvalidFieldIndex(t *testing.T) { +func TestParseInvalidIndexOnField(t *testing.T) { cases := []invalidIndexTestCase{ { description: "forbidden 'field' argument", @@ -263,6 +323,13 @@ func TestInvalidFieldIndex(t *testing.T) { }`, expectedErr: errIndexInvalidName, }, + { + description: "invalid 'unique' value type", + sdl: `type user { + name: String @index(unique: "true") + }`, + expectedErr: errIndexInvalidArgument, + }, } for _, test := range cases { diff --git a/request/graphql/schema/types/types.go b/request/graphql/schema/types/types.go index c28ef566ea..065dadaa6d 100644 --- a/request/graphql/schema/types/types.go +++ b/request/graphql/schema/types/types.go @@ -26,6 +26,7 @@ const ( IndexDirectiveLabel = "index" IndexDirectivePropName = "name" + IndexDirectivePropUnique = "unique" IndexDirectivePropFields = "fields" IndexDirectivePropDirections = "directions" ) diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index 0ce3c92836..f29135d201 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -366,7 +366,12 @@ func (c *Collection) CreateIndex( ) (index 
client.IndexDescription, err error) { args := []string{"client", "index", "create"} args = append(args, "--collection", c.Description().Name) - args = append(args, "--name", indexDesc.Name) + if indexDesc.Name != "" { + args = append(args, "--name", indexDesc.Name) + } + if indexDesc.Unique { + args = append(args, "--unique") + } fields := make([]string, len(indexDesc.Fields)) for i := range indexDesc.Fields { diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go index 2e93f7b146..07f027ef7a 100644 --- a/tests/gen/cli/util_test.go +++ b/tests/gen/cli/util_test.go @@ -74,14 +74,14 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { cfg.API.Address = server.AssignedAddr() // run the server in a separate goroutine - go func() { - log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", cfg.API.AddressToURL())) + go func(apiAddress string) { + log.FeedbackInfo(ctx, fmt.Sprintf("Providing HTTP API at %s.", apiAddress)) if err := server.Run(ctx); err != nil && !errors.Is(err, http.ErrServerClosed) { log.FeedbackErrorE(ctx, "Failed to run the HTTP server", err) db.Close() os.Exit(1) } - }() + }(cfg.API.AddressToURL()) return &defraInstance{ db: db, diff --git a/tests/integration/index/create_drop_test.go b/tests/integration/index/create_drop_test.go index e9f27bfe5e..0680ea7aed 100644 --- a/tests/integration/index/create_drop_test.go +++ b/tests/integration/index/create_drop_test.go @@ -22,9 +22,9 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { - Name: String @index - Age: Int + type User { + name: String @index + age: Int } `, }, @@ -33,8 +33,8 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.DropIndex{ @@ -44,15 +44,15 @@ func TestIndexDrop_ShouldNotHinderQuerying(t *testing.T) { testUtils.Request{ Request: ` query { - Users { - Name - Age + User { + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, diff --git a/tests/integration/index/create_get_test.go b/tests/integration/index/create_get_test.go index 2e758bb637..6ec0962c17 100644 --- a/tests/integration/index/create_get_test.go +++ b/tests/integration/index/create_get_test.go @@ -23,9 +23,9 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users @index(name: "age_index", fields: ["Age"]) { - Name: String @index(name: "name_index") - Age: Int + type User @index(name: "age_index", fields: ["age"]) { + name: String @index(name: "name_index") + age: Int } `, }, @@ -37,7 +37,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { ID: 1, Fields: []client.IndexedFieldDescription{ { - Name: "Name", + Name: "name", Direction: client.Ascending, }, }, @@ -47,7 +47,7 @@ func TestIndexGet_ShouldReturnListOfExistingIndexes(t *testing.T) { ID: 2, Fields: []client.IndexedFieldDescription{ { - Name: "Age", + Name: "age", Direction: client.Ascending, }, }, diff --git a/tests/integration/index/create_test.go b/tests/integration/index/create_test.go index 692b329079..ce3f94080a 100644 --- a/tests/integration/index/create_test.go +++ b/tests/integration/index/create_test.go @@ -24,8 +24,8 @@ func TestIndexCreateWithCollection_ShouldNotHinderQuerying(t *testing.T) { testUtils.SchemaUpdate{ Schema: ` type Users { - Name: 
String @index - Age: Int + name: String @index + age: Int } `, }, @@ -34,22 +34,22 @@ func TestIndexCreateWithCollection_ShouldNotHinderQuerying(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.Request{ Request: ` query { Users { - Name - Age + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, @@ -65,9 +65,9 @@ func TestIndexCreate_ShouldNotHinderQuerying(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { - Name: String - Age: Int + type User { + name: String + age: Int } `, }, @@ -76,27 +76,27 @@ func TestIndexCreate_ShouldNotHinderQuerying(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.CreateIndex{ CollectionID: 0, IndexName: "some_index", - FieldName: "Name", + FieldName: "name", }, testUtils.Request{ Request: ` query { - Users { - Name - Age + User { + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go new file mode 100644 index 0000000000..0cea5023e6 --- /dev/null +++ b/tests/integration/index/create_unique_test.go @@ -0,0 +1,188 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package index + +import ( + "testing" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/db" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +const johnDockey = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" + +func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "If field is not unique, creating of unique index fails", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Andy", + "age": 22 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Shahzad", + "age": 21 + }`, + }, + testUtils.CreateIndex{ + CollectionID: 0, + FieldName: "age", + Unique: true, + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestUniqueIndexCreate_UponAddingDocWithExistingFieldValue_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "adding a new doc with existing value for indexed field should fail", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true, name: "age_unique_index") + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Shahzad", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + }, + testUtils.Request{ + Request: `query { + User(filter: {name: {_eq: "John"}}) { + name + } + }`, + Results: []map[string]any{}, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + Name: "age_unique_index", + ID: 1, + Unique: true, + Fields: []client.IndexedFieldDescription{ + { + Name: "age", + Direction: client.Ascending, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestUniqueIndexCreate_IfFieldValuesAreUnique_Succeed(t *testing.T) { + test := testUtils.TestCase{ + Description: "create unique index if all docs have unique field values", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Shahzad", + "age": 22 + }`, + }, + testUtils.CreateIndex{ + CollectionID: 0, + IndexName: "age_unique_index", + FieldName: "age", + Unique: true, + }, + testUtils.GetIndexes{ + CollectionID: 0, + ExpectedIndexes: []client.IndexDescription{ + { + Name: "age_unique_index", + ID: 1, + Unique: true, + Fields: []client.IndexedFieldDescription{ + { + Name: "age", + Direction: client.Ascending, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/drop_test.go b/tests/integration/index/drop_test.go index ab03e1df50..96e136c332 100644 --- a/tests/integration/index/drop_test.go +++ b/tests/integration/index/drop_test.go @@ -22,9 +22,9 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { - Name: 
String - Age: Int + type User { + name: String + age: Int } `, }, @@ -33,8 +33,8 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { // bae-52b9170d-b77a-5887-b877-cbdbb99b009f Doc: ` { - "Name": "John", - "Age": 21 + "name": "John", + "age": 21 }`, }, testUtils.DropIndex{ @@ -45,15 +45,15 @@ func TestIndexDrop_IfIndexDoesNotExist_ReturnError(t *testing.T) { testUtils.Request{ Request: ` query { - Users { - Name - Age + User { + name + age } }`, Results: []map[string]any{ { - "Name": "John", - "Age": int64(21), + "name": "John", + "age": int64(21), }, }, }, diff --git a/tests/integration/index/get_test.go b/tests/integration/index/get_test.go index 09308a51cf..f29d9046cc 100644 --- a/tests/integration/index/get_test.go +++ b/tests/integration/index/get_test.go @@ -23,9 +23,9 @@ func TestIndexGet_IfThereAreNoIndexes_ReturnEmptyList(t *testing.T) { Actions: []any{ testUtils.SchemaUpdate{ Schema: ` - type Users { - Name: String - Age: Int + type User { + name: String + age: Int } `, }, diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go new file mode 100644 index 0000000000..54ac7b2d8d --- /dev/null +++ b/tests/integration/index/query_with_unique_index_only_filter_test.go @@ -0,0 +1,463 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package index + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestQueryWithUniqueIndex_WithEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {name: {_eq: "Islam"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _eq filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Islam"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(1).WithIndexFetches(1), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithGreaterThanFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_gt: 48}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _gt filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Chris"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithGreaterOrEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_ge: 48}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _ge filter", + Actions: []any{ + 
testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Keenan"}, + {"name": "Chris"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithLessThanFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_lt: 22}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _lt filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Shahzad"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithLessOrEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_le: 23}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _le filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Shahzad"}, + {"name": "Bruno"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithNotEqualFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {name: {_ne: "Islam"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _ne filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Roy"}, + {"name": "Addo"}, + {"name": "Andy"}, + {"name": "Fred"}, + {"name": "John"}, + {"name": "Bruno"}, + {"name": "Chris"}, + {"name": "Keenan"}, + {"name": "Shahzad"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(9).WithFieldFetches(9).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithInFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_in: [20, 33]}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _in filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Shahzad"}, + {"name": "Andy"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: 
testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(2), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithNotInFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {age: {_nin: [20, 23, 28, 33, 42, 55]}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _nin filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "John"}, + {"name": "Islam"}, + {"name": "Roy"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(4).WithFieldFetches(8).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestQueryWithUniqueIndex_WithLikeFilter_ShouldFetch(t *testing.T) { + req1 := `query { + User(filter: {email: {_like: "a%"}}) { + name + } + }` + req2 := `query { + User(filter: {email: {_like: "%d@gmail.com"}}) { + name + } + }` + req3 := `query { + User(filter: {email: {_like: "%e%"}}) { + name + } + }` + req4 := `query { + User(filter: {email: {_like: "fred@gmail.com"}}) { + name + } + }` + req5 := `query { + User(filter: {email: {_like: "a%@gmail.com"}}) { + name + } + }` + req6 := `query { + User(filter: {email: {_like: "a%com%m"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _like filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + email: String @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req1, + Results: []map[string]any{ + {"name": "Addo"}, + {"name": "Andy"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req1), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req2, + Results: []map[string]any{ + {"name": "Fred"}, + {"name": "Shahzad"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req2), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req3, + Results: []map[string]any{ + {"name": "Fred"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req3), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req4, + Results: []map[string]any{ + {"name": "Fred"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req4), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(1).WithFieldFetches(2).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req5, + Results: []map[string]any{ + {"name": "Addo"}, + {"name": "Andy"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req5), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(2).WithFieldFetches(4).WithIndexFetches(10), + }, + testUtils.Request{ + Request: req6, + Results: []map[string]any{}, + }, + testUtils.Request{ + Request: makeExplainQuery(req6), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(0).WithFieldFetches(0).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func 
TestQueryWithUniqueIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) { + req := `query { + User(filter: {name: {_nlike: "%h%"}}) { + name + } + }` + test := testUtils.TestCase{ + Description: "Test index filtering with _nlike filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String @index(unique: true) + age: Int + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.Request{ + Request: req, + Results: []map[string]any{ + {"name": "Roy"}, + {"name": "Addo"}, + {"name": "Andy"}, + {"name": "Fred"}, + {"name": "Bruno"}, + {"name": "Islam"}, + {"name": "Keenan"}, + }, + }, + testUtils.Request{ + Request: makeExplainQuery(req), + Asserter: testUtils.NewExplainAsserter().WithDocFetches(7).WithFieldFetches(7).WithIndexFetches(10), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go index 6ebe6242b3..ecb00e602e 100644 --- a/tests/integration/test_case.go +++ b/tests/integration/test_case.go @@ -228,6 +228,9 @@ type CreateIndex struct { // The directions of the 'FieldsNames' to index. Used only for composite indexes. Directions []client.IndexDirection + // If Unique is true, the index will be created as a unique index. + Unique bool + // Any error expected from the action. Optional. // // String can be a partial, and the test will pass if an error is returned that diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index d414cc1ca4..87d8487ccc 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -1319,6 +1319,7 @@ func createIndex( }) } } + indexDesc.Unique = action.Unique err := withRetry( actionNodes, nodeID, From fdaa8d291410dd804126f4dcd82c5383626b1c3a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 22:44:42 -0800 Subject: [PATCH 27/60] bot: Bump golang.org/x/crypto from 0.16.0 to 0.17.0 (#2144) Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.16.0 to 0.17.0.
Commits
  • 9d2ee97 ssh: implement strict KEX protocol changes
  • 4e5a261 ssh: close net.Conn on all NewServerConn errors
  • 152cdb1 x509roots/fallback: update bundle
  • fdfe1f8 ssh: defer channel window adjustment
  • b8ffc16 blake2b: drop Go 1.6, Go 1.8 compatibility
  • 7e6fbd8 ssh: wrap errors from client handshake
  • bda2f3f argon2: avoid clobbering BP
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/crypto&package-manager=go_modules&previous-version=0.16.0&new-version=0.17.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index bc0d5bbb38..58769d66f4 100644 --- a/go.mod +++ b/go.mod @@ -46,7 +46,7 @@ require ( go.opentelemetry.io/otel/metric v1.21.0 go.opentelemetry.io/otel/sdk/metric v1.21.0 go.uber.org/zap v1.26.0 - golang.org/x/crypto v0.16.0 + golang.org/x/crypto v0.17.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa golang.org/x/net v0.19.0 google.golang.org/grpc v1.60.0 diff --git a/go.sum b/go.sum index 59a628e26b..1142d0096a 100644 --- a/go.sum +++ b/go.sum @@ -684,8 +684,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= -golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= From 9ab044a8d08314ad812e63697ebd324cb1af81b1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Dec 2023 23:15:44 -0800 Subject: [PATCH 28/60] bot: Bump github.com/spf13/viper from 1.17.0 to 1.18.2 (#2145) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/spf13/viper](https://github.com/spf13/viper) from 1.17.0 to 1.18.2.
Release notes

Sourced from github.com/spf13/viper's releases.

v1.18.2

tl;dr: Skip 1.18.0 and 1.18.1 and upgrade to this version instead.

This release fixes a regression that appears in rare circumstances when using Unmarshal or UnmarshalExact to decode values onto pointers with multiple indirection (e.g. a pointer to a pointer). The change was introduced in 1.18.0 as a means to resolve a long-standing bug when decoding environment variables to structs.

The feature is now disabled by default and can be enabled using the viper_bind_struct build tag. It's also considered experimental at this point, so breaking changes may be introduced in the future.
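A minimal sketch of the kind of decode the note above describes, with Unmarshal targeting a pointer with multiple indirection. It is not part of this patch; the Config struct and timeout key are illustrative only.

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

// Config uses a doubly-indirect pointer field, the class of target the
// regression note mentions. The field and key names are illustrative.
type Config struct {
	Timeout **int `mapstructure:"timeout"`
}

func main() {
	v := viper.New()
	v.Set("timeout", 30)

	var cfg Config
	// In v1.18.0/v1.18.1 the new struct-binding code ran unconditionally
	// and the regression could surface on decodes like this; in v1.18.2
	// that code only runs when built with `go build -tags viper_bind_struct`.
	if err := v.Unmarshal(&cfg); err != nil {
		panic(err)
	}
	fmt.Println(**cfg.Timeout) // 30
}
```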

Full Changelog: https://github.com/spf13/viper/compare/v1.18.1...v1.18.2

v1.18.1

Full Changelog: https://github.com/spf13/viper/compare/v1.18.0...v1.18.1

v1.18.0

Major changes

Highlighting some of the changes for better visibility.

Please share your feedback in the Discussion forum. Thanks! ❤️

AutomaticEnv works with Unmarshal

Previously, environment variables that weren't bound manually or had no defaults could not be mapped by Unmarshal. (The problem is explained in detail in this issue: #761)

#1429 introduced a solution that solves that issue.
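A minimal sketch of what that solution enables, assuming viper v1.18.x built with the experimental viper_bind_struct tag; the APP_PORT variable and Config struct are illustrative, not taken from this patch.

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/viper"
)

type Config struct {
	Port int `mapstructure:"port"`
}

func main() {
	os.Setenv("APP_PORT", "8080")

	v := viper.New()
	v.SetEnvPrefix("APP")
	v.AutomaticEnv()

	var cfg Config
	// With the change from #1429, Unmarshal can discover APP_PORT via
	// AutomaticEnv even though "port" was never bound manually and has
	// no default value set.
	if err := v.Unmarshal(&cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Port) // 8080
}
```

Built without the tag, the same program leaves cfg.Port at its zero value, which is the pre-1.18 behaviour the release notes describe.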

What's Changed

... (truncated)

Commits
  • ab3a50c fix!: hide struct binding behind a feature flag
  • 9154b90 build(deps): bump actions/setup-go from 4.1.0 to 5.0.0
  • 08e4a00 build(deps): bump github/codeql-action from 2.22.8 to 2.22.9
  • fb6eb1e fix: merge missing struct keys inside UnmarshalExact
  • f5fcb4a chore: update crypt
  • f736363 fix isPathShadowedInFlatMap type cast bug (#1585)
  • 36a3868 Review changes
  • f0c4ccd fix: gocritic lint issues
  • 3a23b80 ci: enable test shuffle; fix tests
  • 73dfb94 feat: make Unmarshal work with AutomaticEnv
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/spf13/viper&package-manager=go_modules&previous-version=1.17.0&new-version=1.18.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 14 +-- go.sum | 327 +++------------------------------------------------------ 2 files changed, 21 insertions(+), 320 deletions(-) diff --git a/go.mod b/go.mod index 58769d66f4..479a7bdfad 100644 --- a/go.mod +++ b/go.mod @@ -37,7 +37,7 @@ require ( github.com/sourcenetwork/immutable v0.3.0 github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 - github.com/spf13/viper v1.17.0 + github.com/spf13/viper v1.18.2 github.com/stretchr/testify v1.8.4 github.com/tidwall/btree v1.7.0 github.com/ugorji/go/codec v1.2.12 @@ -72,7 +72,7 @@ require ( github.com/elastic/gosigar v0.14.2 // indirect github.com/flynn/noise v1.0.0 // indirect github.com/francoispqt/gojay v1.2.13 // indirect - github.com/fsnotify/fsnotify v1.6.0 // indirect + github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/go-logr/logr v1.3.0 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.19.6 // indirect @@ -86,7 +86,7 @@ require ( github.com/google/flatbuffers v2.0.6+incompatible // indirect github.com/google/gopacket v1.1.19 // indirect github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b // indirect - github.com/google/uuid v1.3.1 // indirect + github.com/google/uuid v1.4.0 // indirect github.com/gorilla/websocket v1.5.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect @@ -161,12 +161,12 @@ require ( github.com/quic-go/webtransport-go v0.6.0 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect - github.com/sagikazarmark/locafero v0.3.0 // indirect + github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spaolacci/murmur3 v1.1.0 // indirect - github.com/spf13/afero v1.10.0 // indirect - github.com/spf13/cast v1.5.1 // indirect + github.com/spf13/afero v1.11.0 // indirect + github.com/spf13/cast v1.6.0 // indirect github.com/stretchr/objx v0.5.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect @@ -186,7 +186,7 @@ require ( golang.org/x/text v0.14.0 // indirect golang.org/x/tools v0.15.0 // indirect gonum.org/v1/gonum v0.13.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect lukechampine.com/blake3 v1.2.1 // indirect diff --git a/go.sum b/go.sum index 1142d0096a..67c6de1d4f 100644 --- a/go.sum +++ b/go.sum @@ -2,50 +2,13 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT cloud.google.com/go v0.31.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.37.0/go.mod h1:TS1dMSSfndXH133OKGwekG838Om/cQT0BUHV3HcBgoo= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod 
h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= -cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= -cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= -cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= dmitri.shuralyov.com/app/changes v0.0.0-20180602232624-0a106ad413e3/go.mod h1:Yl+fi1br7+Rr3LqpNJf1/uxUdtRUV+Tnj0o93V2B9MU= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= dmitri.shuralyov.com/html/belt v0.0.0-20180602232347-f7d459c86be0/go.mod h1:JLBrvjyP0v+ecvNYvCpyZgu5/xkfAUhi6wJj28eUfSU= dmitri.shuralyov.com/service/change v0.0.0-20181023043359-a85b471d5412/go.mod h1:a1inKt/atXimZ4Mv927x+r7UpyzRUf4emIoiiSC2TN4= dmitri.shuralyov.com/state v0.0.0-20180228185332-28bcc343414c/go.mod h1:0PRwlb0D6DFvNNtx+9ybjezNCa8XF0xaYcETyp6rHWU= git.apache.org/thrift.git v0.0.0-20180902110319-2566ecd5d999/go.mod h1:fPE2ZNJGynbRyZ4dJvy6G277gSllfV2HJqblrnkyeyg= github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 h1:cTp8I5+VIoKjsnZuH8vjyaysT/ses3EvZeaV/1UkF2M= 
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= github.com/Jorropo/jsync v1.0.1 h1:6HgRolFZnsdfzRUj+ImB9og1JYOxQoReSywkHOGSaUU= github.com/Jorropo/jsync v1.0.1/go.mod h1:jCOZj3vrBCri3bSU3ErUYvevKlnbssrXeCivybS5ABQ= @@ -71,14 +34,9 @@ github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/cilium/ebpf v0.2.0/go.mod h1:To2CFviqOWL/M0gIMsvSMlqe7em/l1ALkX1PyjrX2Qs= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/containerd/cgroups v0.0.0-20201119153540-4cbc285b3327/go.mod h1:ZJeTFisyysqgcCdecO57Dj79RfL0LNeGiFUqLYQRYLE= github.com/containerd/cgroups v1.1.0 h1:v8rEWFl6EoqHB+swVNjVoCJE8o3jX7e8nqBGPLaDFBM= github.com/containerd/cgroups v1.1.0/go.mod h1:6ppBcbh/NOOUU+dMKrykgaBnK9lCIBxHqJDGwsa1mIw= @@ -121,8 +79,6 @@ github.com/elastic/gosigar v0.14.2/go.mod h1:iXRIGg2tLnu7LBdpqzyQfGDEidKCfWcCMS0 github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= -github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/evanphx/json-patch/v5 v5.7.0 h1:nJqP7uwL84RJInrohHfW0Fx3awjbm8qZeFv0nW9SYGc= github.com/evanphx/json-patch/v5 v5.7.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ= @@ -134,8 +90,8 @@ github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiD github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= +github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= 
+github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADivE= github.com/fxamacker/cbor/v2 v2.5.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= github.com/getkin/kin-openapi v0.120.0 h1:MqJcNJFrMDFNc07iwE8iFC5eT2k/NPUFDIpNeiZv8Jg= @@ -149,9 +105,6 @@ github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vz github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= @@ -180,25 +133,15 @@ github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.1.2 h1:DVjP2PbBOzHyzA+dn3WhHIq4NdVu3Q+pvivFICf/7fo= github.com/golang/glog v1.1.2/go.mod h1:zR+okUeTbrL6EL3xHUDxZuEtGv04p5shwip1+mL/rLQ= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:tluoj9z5200jBnyusfRPU2LqT6J+DAorxEvtC7LHB+E= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= 
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= @@ -212,19 +155,15 @@ github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw= github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ= @@ -232,29 +171,15 @@ github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF8= github.com/google/gopacket v1.1.19/go.mod h1:iJ8V8n6KS+z2U1A8pUwu8bW5SyEMkXJB8Yo/Vo+TKTo= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof 
v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= -github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b h1:RMpPgZTSApbPf7xaVel+QkoGPRLFLrwFO89uDUHEGf0= github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= -github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= +github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20190812055157-5d271430af9f h1:KMlcu9X58lhTA/KrfX8Bi1LQSO4pzoVjTiL3h4Jk+Zk= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= @@ -266,8 +191,6 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d h1:dg1dEPuWpEqDnvIw251EVy4zlP8gWbsGj4BsUKCRpYs= github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/golang-lru/arc/v2 v2.0.5 h1:l2zaLDubNhW4XO3LnliVj0GXO3+/CGNJAg1dcN2Fpfw= @@ -282,8 +205,6 @@ github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc= github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod 
h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= @@ -341,7 +262,6 @@ github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8Hm github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= @@ -353,7 +273,6 @@ github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/4 github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/koron/go-ssdp v0.0.4 h1:1IDwrghSKYM7yLf7XCzbByg2sJ/JcNOZRXS2jczTwz0= github.com/koron/go-ssdp v0.0.4/go.mod h1:oDXq+E5IL5q0U8uSBcoAXzTzInwy5lEgC91HoKtbmZk= -github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -491,7 +410,6 @@ github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0V github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= @@ -526,8 +444,8 @@ github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/sagikazarmark/locafero v0.3.0 h1:zT7VEGWC2DTflmccN/5T1etyKvxSxpHsjb9cJvm4SvQ= -github.com/sagikazarmark/locafero v0.3.0/go.mod h1:w+v7UsPNFwzF1cHuOajOOzoq4U7v/ig1mpRjqV+Bu1U= +github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ= +github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -573,16 +491,16 @@ github.com/sourcenetwork/immutable v0.3.0 h1:gHPtGvLrTBTK5YpDAhMU+u+S8v1F6iYmc3n github.com/sourcenetwork/immutable v0.3.0/go.mod 
h1:GD7ceuh/HD7z6cdIwzKK2ctzgZ1qqYFJpsFp+8qYnbI= github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI= github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= -github.com/spf13/afero v1.10.0 h1:EaGW2JJh15aKOejeuJ+wpFSHnbd7GE6Wvp3TsNhb6LY= -github.com/spf13/afero v1.10.0/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ= -github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA= -github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48= +github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= +github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= +github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= +github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.17.0 h1:I5txKw7MJasPL/BrfkbA0Jyo/oELqVmux4pR/UxOMfI= -github.com/spf13/viper v1.17.0/go.mod h1:BmMMMLQXSbcHK6KAOiFLz0l5JHrU89OdIRHvsk0+yVI= +github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= +github.com/spf13/viper v1.18.2/go.mod h1:EKmWIqdnk5lOcmR72yw6hS+8OPYcwD0jteitLMVB+yk= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= @@ -626,18 +544,10 @@ github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdz github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc= github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM= github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= go.opencensus.io v0.18.0/go.mod h1:vKdFvxhtzZ9onBp9VKHK8z/sRpBMnKAsufL7wlDrCOA= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/otel v1.21.0 h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc= @@ -676,52 +586,26 @@ golang.org/x/crypto v0.0.0-20181030102418-4d3f4d9ffa16/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto 
v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190313024323-a1f597ede03a/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint 
v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.14.0 h1:dGoOF9QVLYng8IHTm7BAyWqCqSheQ5pYWGhzW00YJr0= golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= @@ -735,49 +619,20 @@ golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190313220215-9f648a60d977/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net 
v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/perf v0.0.0-20180704124530-6e6d33e29852/go.mod h1:JLpeXjPJfIyPr5TlbXLkXWLhP8nz10XfvxElABhCtcw= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -785,10 +640,7 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= @@ -797,72 +649,35 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181029174526-d69651ed3497/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190316082340-a2f829d7f35f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod 
h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030000716-a0a13e073c7b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -870,55 +685,16 @@ golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools 
v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= -golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools 
v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.15.0 h1:zdAyfUGbYmuVokhzVmghFl2ZJh5QhcfebBgmVPFYA+8= golang.org/x/tools v0.15.0/go.mod h1:hpksKq4dtpQWS1uQ61JkdqWM3LscIS6Slf+VVkm+wQk= @@ -931,95 +707,27 @@ gonum.org/v1/gonum v0.13.0/go.mod h1:/WPYRckkfWrhWefxyYTfrTtQR0KH4iyHNuzxqXAKyAU google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= -google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= -google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod 
h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20181029155118-b69ba1387ce2/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk928CDR8SjdVbjWNpdIf6nzjE3BTgJDr2Atg= google.golang.org/genproto v0.0.0-20190306203927-b5d61aea6440/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod 
h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97 h1:6GQBEOdGkX6MMTLT9V+TjtIRZCw9VPD5Z+yHY9wMgS0= -google.golang.org/genproto/googleapis/rpc v0.0.0-20231002182017-d307bd883b97/go.mod h1:v7nGkzlmW8P3n/bKmWBn2WpBjpOEx8Q6gMueudAmKfY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f h1:ultW7fxlIvee4HYrtnaRPon9HpEgFk5zYpmfMgtKB5I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f/go.mod h1:L9KNLi232K1/xB6f7AlSX692koaRnKaWSR0stBki0Yc= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= -google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.60.0 h1:6FQAR0kM31P6MRdeluor2w2gPaS4SVNrD/DNTxrQ15k= google.golang.org/grpc v1.60.0/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= 
@@ -1030,7 +738,6 @@ google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzi google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= @@ -1062,15 +769,9 @@ grpc.go4.org v0.0.0-20170609214715-11d0a25b4919/go.mod h1:77eQGdRu53HpSqPFJFmuJd honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= lukechampine.com/blake3 v1.2.1 h1:YuqqRuaqsGV71BV/nm9xlI0MKUv4QC54jQnBChWbGnI= lukechampine.com/blake3 v1.2.1/go.mod h1:0OFRp7fBtAylGVCO40o87sbupkyIGgbpv1+M1k1LM6k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= sourcegraph.com/sourcegraph/go-diff v0.5.0/go.mod h1:kuch7UrkMzY0X+p9CRK03kfuPQ2zzQcaEFbx8wA8rck= sourcegraph.com/sqs/pbtypes v0.0.0-20180604144634-d3ebe8f20ae4/go.mod h1:ketZ/q3QxT9HOBeFhu6RdvsftgpsbFHBF5Cas6cDKZ0= From c0c4cbf72f5aeb6a5053ec9449ea8fc44c2a0e57 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 21 Dec 2023 10:42:04 -0500 Subject: [PATCH 29/60] feat: Allow users to add Views (#2114) ## Relevant issue(s) Resolves #2073 ## Description Allows users to add Views. Defined views and embedded schema types are visible when calling GetCollections/Schema etc. 
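For illustration, the new capability can be exercised end to end roughly as follows (a hedged sketch assembled from the `view add` CLI command and the `Store.AddView` documentation introduced in this patch; `Author`, `AuthorView` and `BookView` are example names, not types shipped by this change):

    defradb client view add \
      'Author { name books { name } }' \
      'type AuthorView { name: String books: [BookView] } interface BookView { name: String }'

The base query is persisted on the view's collection description, and nested objects in the SDL are declared with the `interface` keyword so that they remain embedded, schema-only types.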
--- cli/cli.go | 6 + cli/errors.go | 1 + cli/view.go | 25 ++ cli/view_add.go | 43 ++++ client/db.go | 28 ++ client/descriptions.go | 9 + client/request/select.go | 87 +++++++ db/errors.go | 17 ++ db/txn_db.go | 24 ++ db/view.go | 99 +++++++ http/client.go | 26 ++ http/handler_store.go | 53 ++++ http/openapi.go | 42 +-- planner/datasource.go | 18 +- planner/mapper/errors.go | 5 + planner/mapper/mapper.go | 123 ++++----- planner/operations.go | 1 + planner/planner.go | 3 + planner/select.go | 3 +- planner/view.go | 95 +++++++ request/graphql/schema/collection.go | 63 ++++- request/graphql/schema/generate.go | 101 ++++++-- tests/clients/cli/wrapper.go | 16 ++ tests/clients/http/wrapper.go | 4 + tests/integration/explain.go | 1 + .../explain/debug/with_view_test.go | 71 ++++++ tests/integration/schema/default_fields.go | 9 + tests/integration/test_case.go | 20 ++ tests/integration/utils2.go | 15 ++ .../view/one_to_many/simple_test.go | 241 ++++++++++++++++++ .../view/one_to_many/with_alias_test.go | 169 ++++++++++++ .../view/one_to_many/with_count_test.go | 158 ++++++++++++ .../one_to_many/with_introspection_test.go | 131 ++++++++++ .../view/one_to_one/identical_schema_test.go | 94 +++++++ tests/integration/view/simple/simple_test.go | 162 ++++++++++++ .../view/simple/with_alias_test.go | 65 +++++ .../view/simple/with_filter_test.go | 135 ++++++++++ .../view/simple/with_introspection_test.go | 77 ++++++ 38 files changed, 2120 insertions(+), 120 deletions(-) create mode 100644 cli/view.go create mode 100644 cli/view_add.go create mode 100644 db/view.go create mode 100644 planner/view.go create mode 100644 tests/integration/explain/debug/with_view_test.go create mode 100644 tests/integration/view/one_to_many/simple_test.go create mode 100644 tests/integration/view/one_to_many/with_alias_test.go create mode 100644 tests/integration/view/one_to_many/with_count_test.go create mode 100644 tests/integration/view/one_to_many/with_introspection_test.go create mode 100644 tests/integration/view/one_to_one/identical_schema_test.go create mode 100644 tests/integration/view/simple/simple_test.go create mode 100644 tests/integration/view/simple/with_alias_test.go create mode 100644 tests/integration/view/simple/with_filter_test.go create mode 100644 tests/integration/view/simple/with_introspection_test.go diff --git a/cli/cli.go b/cli/cli.go index 0f93b69633..8827424334 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -63,6 +63,11 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { schema_migrate, ) + view := MakeViewCommand() + view.AddCommand( + MakeViewAddCommand(), + ) + index := MakeIndexCommand() index.AddCommand( MakeIndexCreateCommand(), @@ -98,6 +103,7 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { MakeDumpCommand(), MakeRequestCommand(), schema, + view, index, p2p, backup, diff --git a/cli/errors.go b/cli/errors.go index 937bdd2c9d..ee89a63249 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -27,6 +27,7 @@ var ( ErrNoLensConfig = errors.New("lens config cannot be empty") ErrInvalidLensConfig = errors.New("invalid lens configuration") ErrSchemaVersionNotOfSchema = errors.New(errSchemaVersionNotOfSchema) + ErrViewAddMissingArgs = errors.New("please provide a base query and output SDL for this view") ) func NewErrInvalidLensConfig(inner error) error { diff --git a/cli/view.go b/cli/view.go new file mode 100644 index 0000000000..aaf9b58a4d --- /dev/null +++ b/cli/view.go @@ -0,0 +1,25 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the 
Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import ( + "github.com/spf13/cobra" +) + +func MakeViewCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "view", + Short: "Manage views within a running DefraDB instance", + Long: "Manage (add) views within a running DefraDB instance", + } + + return cmd +}
diff --git a/cli/view_add.go b/cli/view_add.go new file mode 100644 index 0000000000..46779fb784 --- /dev/null +++ b/cli/view_add.go @@ -0,0 +1,43 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package cli + +import "github.com/spf13/cobra" + +func MakeViewAddCommand() *cobra.Command { + var cmd = &cobra.Command{ + Use: "add [query] [sdl]", + Short: "Add new view", + Long: `Add new database view. + +Example: add from an argument string: + defradb client view add 'Foo { name, ...}' 'type Foo { ... }' + +Learn more about the DefraDB GraphQL Schema Language on https://docs.source.network.`, + RunE: func(cmd *cobra.Command, args []string) error { + store := mustGetStoreContext(cmd) + + if len(args) != 2 { + return ErrViewAddMissingArgs + } + + query := args[0] + sdl := args[1] + + defs, err := store.AddView(cmd.Context(), query, sdl) + if err != nil { + return err + } + return writeJSON(cmd, defs) + }, + } + return cmd +}
diff --git a/client/db.go b/client/db.go index 59fb1ddc18..240d2d5dfc 100644 --- a/client/db.go +++ b/client/db.go @@ -123,6 +123,34 @@ type Store interface { // It will return an error if the provided schema version ID does not exist. SetDefaultSchemaVersion(context.Context, string) error + // AddView creates a new Defra View. + // + // It takes a GQL query string, for example: + // + // Author { + // name + // books { + // name + // } + // } + // + // + // A GQL SDL that matches its output type must also be provided. There can only be one `type` declaration, + // any nested objects must be declared as embedded/schema-only types using the `interface` keyword. + // Relations must only be specified on the parent side of the relationship. For example: + // + // type AuthorView { + // name: String + // books: [BookView] + // } + // interface BookView { + // name: String + // } + // + // It will return the collection definitions of the types defined in the SDL if successful, otherwise an error + // will be returned. This function does not execute the given query. + AddView(ctx context.Context, gqlQuery string, sdl string) ([]CollectionDefinition, error) + // SetMigration sets the migration for the given source-destination schema version IDs. Is equivalent to // calling `LensRegistry().SetMigration(ctx, cfg)`. // diff --git a/client/descriptions.go b/client/descriptions.go index 96f68108b4..7ab7cc0982 100644 --- a/client/descriptions.go +++ b/client/descriptions.go @@ -12,6 +12,8 @@ package client import ( "fmt" + + "github.com/sourcenetwork/defradb/client/request" ) // CollectionDescription describes a Collection and all its associated metadata.
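For reference, a minimal Go sketch of driving the `Store.AddView` method added above (a hypothetical helper, not part of this patch; it assumes the caller supplies an open `client.Store`, with imports of `context` and the `client` package elided):

    func addAuthorView(ctx context.Context, store client.Store) ([]client.CollectionDefinition, error) {
        // The base query selects the fields the view will expose.
        query := `Author { name books { name } }`
        // The SDL declares the matching output types: a single `type`, with
        // nested objects declared via `interface` so they stay schema-only.
        sdl := `type AuthorView { name: String books: [BookView] }
                interface BookView { name: String }`
        // AddView stores the base query and returns the definitions of the
        // declared types; it does not execute the query itself.
        return store.AddView(ctx, query, sdl)
    }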
@@ -30,6 +32,13 @@ type CollectionDescription struct { // The ID of the schema version that this collection is at. SchemaVersionID string + // BaseQuery contains the base query of this view, if this collection is a view. + // + // The query will be saved, and then may be accessed by other actors on demand. Actor defined + // aggregates, filters and other logic (such as LensVM transforms) will execute on top of this + // base query before the result is returned to the actor. + BaseQuery *request.Select + // Indexes contains the secondary indexes that this Collection has. Indexes []IndexDescription } diff --git a/client/request/select.go b/client/request/select.go index fb842228aa..f7d1517dec 100644 --- a/client/request/select.go +++ b/client/request/select.go @@ -11,6 +11,8 @@ package request import ( + "encoding/json" + "github.com/sourcenetwork/immutable" ) @@ -107,3 +109,88 @@ func (s *Select) validateGroupBy() []error { return result } + +// selectJson is a private object used for handling json deserialization +// of `Select` objects. +type selectJson struct { + Field + DocKeys immutable.Option[[]string] + CID immutable.Option[string] + Root SelectionType + Limit immutable.Option[uint64] + Offset immutable.Option[uint64] + OrderBy immutable.Option[OrderBy] + GroupBy immutable.Option[GroupBy] + Filter immutable.Option[Filter] + ShowDeleted bool + + // Properties above this line match the `Select` object and + // are deserialized using the normal/default logic. + // Properties below this line require custom logic in `UnmarshalJSON` + // in order to be deserialized correctly. + + Fields []map[string]json.RawMessage +} + +func (s *Select) UnmarshalJSON(bytes []byte) error { + var selectMap selectJson + err := json.Unmarshal(bytes, &selectMap) + if err != nil { + return err + } + + s.Field = selectMap.Field + s.DocKeys = selectMap.DocKeys + s.CID = selectMap.CID + s.Root = selectMap.Root + s.Limit = selectMap.Limit + s.Offset = selectMap.Offset + s.OrderBy = selectMap.OrderBy + s.GroupBy = selectMap.GroupBy + s.Filter = selectMap.Filter + s.ShowDeleted = selectMap.ShowDeleted + s.Fields = make([]Selection, len(selectMap.Fields)) + + for i, field := range selectMap.Fields { + fieldJson, err := json.Marshal(field) + if err != nil { + return err + } + + var fieldValue Selection + // We detect which concrete type each `Selection` object is by detecting + // non-nillable fields, if the key is present it must be of that type. + // They must be non-nillable as nil values may have their keys omitted from + // the json. This also relies on the fields being unique. We may wish to change + // this later to custom-serialize with a `_type` property. 
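+ // For example (hypothetical serialized shapes, for illustration only): a field
+ // such as {"Name": "books", "Root": 1, "Fields": [...]} carries a "Root" key and
+ // is decoded as a Select; {"Name": "_count", "Targets": [...]} carries "Targets"
+ // and is decoded as an Aggregate; a bare {"Name": "name"} falls through to Field.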
+ if _, ok := field["Root"]; ok { + // This must be a Select, as only the `Select` type has a `Root` field + var fieldSelect Select + err := json.Unmarshal(fieldJson, &fieldSelect) + if err != nil { + return err + } + fieldValue = &fieldSelect + } else if _, ok := field["Targets"]; ok { + // This must be an Aggregate, as only the `Aggregate` type has a `Targets` field + var fieldAggregate Aggregate + err := json.Unmarshal(fieldJson, &fieldAggregate) + if err != nil { + return err + } + fieldValue = &fieldAggregate + } else { + // This must be a Field + var fieldField Field + err := json.Unmarshal(fieldJson, &fieldField) + if err != nil { + return err + } + fieldValue = &fieldField + } + + s.Fields[i] = fieldValue + } + + return nil +}
diff --git a/db/errors.go b/db/errors.go index 1413c1289d..67f74db296 100644 --- a/db/errors.go +++ b/db/errors.go @@ -87,6 +87,7 @@ const ( errOneOneAlreadyLinked string = "target document is already linked to another document" errIndexDoesNotMatchName string = "the index used does not match the given name" errCanNotIndexNonUniqueField string = "can not create doc that violates unique index" + errInvalidViewQuery string = "the query provided is not valid as a View" ) var ( @@ -165,6 +166,7 @@ var ( ErrExpectedJSONArray = errors.New(errExpectedJSONArray) ErrOneOneAlreadyLinked = errors.New(errOneOneAlreadyLinked) ErrIndexDoesNotMatchName = errors.New(errIndexDoesNotMatchName) + ErrInvalidViewQuery = errors.New(errInvalidViewQuery) ) // NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. @@ -641,3 +643,18 @@ func NewErrCanNotIndexNonUniqueField(dockey, fieldName string, value any) error errors.NewKV("Field value", value), ) } + +func NewErrInvalidViewQueryCastFailed(query string) error { + return errors.New( + errInvalidViewQuery, + errors.NewKV("Query", query), + errors.NewKV("Reason", "Internal error, cast failed"), + ) +} + +func NewErrInvalidViewQueryMissingQuery() error { + return errors.New( + errInvalidViewQuery, + errors.NewKV("Reason", "No query provided"), + ) +}
diff --git a/db/txn_db.go b/db/txn_db.go index 380cfeed34..92f9cde6c1 100644 --- a/db/txn_db.go +++ b/db/txn_db.go @@ -378,6 +378,30 @@ func (db *explicitTxnDB) SetMigration(ctx context.Context, cfg client.LensConfig return db.lensRegistry.SetMigration(ctx, cfg) } +func (db *implicitTxnDB) AddView(ctx context.Context, query string, sdl string) ([]client.CollectionDefinition, error) { + txn, err := db.NewTxn(ctx, false) + if err != nil { + return nil, err + } + defer txn.Discard(ctx) + + defs, err := db.addView(ctx, txn, query, sdl) + if err != nil { + return nil, err + } + + err = txn.Commit(ctx) + if err != nil { + return nil, err + } + + return defs, nil +} + +func (db *explicitTxnDB) AddView(ctx context.Context, query string, sdl string) ([]client.CollectionDefinition, error) { + return db.addView(ctx, db.txn, query, sdl) +} + // BasicImport imports a json dataset. // filepath must be accessible to the node. func (db *implicitTxnDB) BasicImport(ctx context.Context, filepath string) error { diff --git a/db/view.go b/db/view.go new file mode 100644 index 0000000000..dc04c83303 --- /dev/null +++ b/db/view.go @@ -0,0 +1,99 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt.
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package db + +import ( + "context" + "errors" + "fmt" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/client/request" + "github.com/sourcenetwork/defradb/datastore" + "github.com/sourcenetwork/defradb/db/description" +) + +func (db *db) addView( + ctx context.Context, + txn datastore.Txn, + inputQuery string, + sdl string, +) ([]client.CollectionDefinition, error) { + // Wrap the given query as part of the GQL query object - this simplifies the syntax for users + // and ensures that we can't be given mutations. In the future this line should disappear along + // with all calls to the parser apart from `ParseSDL` when we implement the DQL stuff. + query := fmt.Sprintf(`query { %s }`, inputQuery) + + newDefinitions, err := db.parser.ParseSDL(ctx, sdl) + if err != nil { + return nil, err + } + + ast, err := db.parser.BuildRequestAST(query) + if err != nil { + return nil, err + } + + req, errs := db.parser.Parse(ast) + if len(errs) > 0 { + return nil, errors.Join(errs...) + } + + if len(req.Queries) == 0 || len(req.Queries[0].Selections) == 0 { + return nil, NewErrInvalidViewQueryMissingQuery() + } + + baseQuery, ok := req.Queries[0].Selections[0].(*request.Select) + if !ok { + return nil, NewErrInvalidViewQueryCastFailed(inputQuery) + } + + for i := range newDefinitions { + newDefinitions[i].Description.BaseQuery = baseQuery + } + + existingCollections, err := db.getAllCollections(ctx, txn) + if err != nil { + return nil, err + } + + existingDefinitions := make([]client.CollectionDefinition, len(existingCollections)) + for i := range existingCollections { + existingDefinitions[i] = existingCollections[i].Definition() + } + + err = db.parser.SetSchema(ctx, txn, append(existingDefinitions, newDefinitions...)) + if err != nil { + return nil, err + } + + returnDescriptions := make([]client.CollectionDefinition, len(newDefinitions)) + for i, definition := range newDefinitions { + if definition.Description.Name == "" { + schema, err := description.CreateSchemaVersion(ctx, txn, definition.Schema) + if err != nil { + return nil, err + } + returnDescriptions[i] = client.CollectionDefinition{ + // `Collection` is left as default for embedded types + Schema: schema, + } + } else { + col, err := db.createCollection(ctx, txn, definition) + if err != nil { + return nil, err + } + returnDescriptions[i] = col.Definition() + } + } + + return returnDescriptions, nil +}
diff --git a/http/client.go b/http/client.go index 148715e877..dc289ceb39 100644 --- a/http/client.go +++ b/http/client.go @@ -163,6 +163,32 @@ func (c *Client) SetDefaultSchemaVersion(ctx context.Context, schemaVersionID st return err } +type addViewRequest struct { + Query string + SDL string +} + +func (c *Client) AddView(ctx context.Context, query string, sdl string) ([]client.CollectionDefinition, error) { + methodURL := c.http.baseURL.JoinPath("view") + + body, err := json.Marshal(addViewRequest{query, sdl}) + if err != nil { + return nil, err + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, methodURL.String(), bytes.NewBuffer(body)) + if err != nil { + return nil, err + } + + var descriptions []client.CollectionDefinition + if err := c.http.requestJson(req, &descriptions); err != nil { + return nil, err + } + + return descriptions, nil +} + +func (c
*Client) SetMigration(ctx context.Context, config client.LensConfig) error { return c.LensRegistry().SetMigration(ctx, config) }
diff --git a/http/handler_store.go b/http/handler_store.go index aadbb37731..0e9f0c2ed2 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -106,6 +106,25 @@ func (s *storeHandler) SetDefaultSchemaVersion(rw http.ResponseWriter, req *http rw.WriteHeader(http.StatusOK) } +func (s *storeHandler) AddView(rw http.ResponseWriter, req *http.Request) { + store := req.Context().Value(storeContextKey).(client.Store) + + var message addViewRequest + err := requestJSON(req, &message) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + defs, err := store.AddView(req.Context(), message.Query, message.SDL) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return + } + + responseJSON(rw, http.StatusOK, defs) +} + func (s *storeHandler) GetCollection(rw http.ResponseWriter, req *http.Request) { store := req.Context().Value(storeContextKey).(client.Store) @@ -326,6 +345,9 @@ func (h *storeHandler) bindRoutes(router *Router) { collectionSchema := &openapi3.SchemaRef{ Ref: "#/components/schemas/collection", } + collectionDefinitionSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/collection_definition", + } schemaSchema := &openapi3.SchemaRef{ Ref: "#/components/schemas/schema", } @@ -338,6 +360,9 @@ func (h *storeHandler) bindRoutes(router *Router) { backupConfigSchema := &openapi3.SchemaRef{ Ref: "#/components/schemas/backup_config", } + addViewSchema := &openapi3.SchemaRef{ + Ref: "#/components/schemas/add_view_request", + } patchSchemaRequestSchema := &openapi3.SchemaRef{ Ref: "#/components/schemas/patch_schema_request", } @@ -449,6 +474,33 @@ func (h *storeHandler) bindRoutes(router *Router) { collectionDescribe.AddResponse(200, collectionsResponse) collectionDescribe.Responses["400"] = errorResponse + collectionDefinitionsSchema := openapi3.NewArraySchema() + collectionDefinitionsSchema.Items = collectionDefinitionSchema + + addViewResponseSchema := openapi3.NewOneOfSchema() + addViewResponseSchema.OneOf = openapi3.SchemaRefs{ + collectionDefinitionSchema, + openapi3.NewSchemaRef("", collectionDefinitionsSchema), + } + + addViewResponse := openapi3.NewResponse(). + WithDescription("The created collection and embedded schemas for the added view."). + WithJSONSchema(addViewResponseSchema) + + addViewRequest := openapi3.NewRequestBody(). + WithRequired(true). + WithJSONSchemaRef(addViewSchema) + + views := openapi3.NewOperation() + views.OperationID = "view" + views.Description = "Manage database views." + views.Tags = []string{"view"} + views.RequestBody = &openapi3.RequestBodyRef{ + Value: addViewRequest, + } + views.AddResponse(200, addViewResponse) + views.Responses["400"] = errorResponse + schemaNameQueryParam := openapi3.NewQueryParameter("name"). WithDescription("Schema name").
WithSchema(openapi3.NewStringSchema()) @@ -521,6 +573,7 @@ func (h *storeHandler) bindRoutes(router *Router) { router.AddRoute("/backup/export", http.MethodPost, backupExport, h.BasicExport) router.AddRoute("/backup/import", http.MethodPost, backupImport, h.BasicImport) router.AddRoute("/collections", http.MethodGet, collectionDescribe, h.GetCollection) + router.AddRoute("/view", http.MethodPost, views, h.AddView) router.AddRoute("/graphql", http.MethodGet, graphQLGet, h.ExecRequest) router.AddRoute("/graphql", http.MethodPost, graphQLPost, h.ExecRequest) router.AddRoute("/debug/dump", http.MethodGet, debugDump, h.PrintDump) diff --git a/http/openapi.go b/http/openapi.go index 4aa217e939..13e2b01f27 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -20,24 +20,26 @@ import ( // openApiSchemas is a mapping of types to auto generate schemas for. var openApiSchemas = map[string]any{ - "error": &errorResponse{}, - "create_tx": &CreateTxResponse{}, - "collection_update": &CollectionUpdateRequest{}, - "collection_delete": &CollectionDeleteRequest{}, - "peer_info": &peer.AddrInfo{}, - "graphql_request": &GraphQLRequest{}, - "graphql_response": &GraphQLResponse{}, - "backup_config": &client.BackupConfig{}, - "collection": &client.CollectionDescription{}, - "schema": &client.SchemaDescription{}, - "index": &client.IndexDescription{}, - "delete_result": &client.DeleteResult{}, - "update_result": &client.UpdateResult{}, - "lens_config": &client.LensConfig{}, - "replicator": &client.Replicator{}, - "ccip_request": &CCIPRequest{}, - "ccip_response": &CCIPResponse{}, - "patch_schema_request": &patchSchemaRequest{}, + "error": &errorResponse{}, + "create_tx": &CreateTxResponse{}, + "collection_update": &CollectionUpdateRequest{}, + "collection_delete": &CollectionDeleteRequest{}, + "peer_info": &peer.AddrInfo{}, + "graphql_request": &GraphQLRequest{}, + "graphql_response": &GraphQLResponse{}, + "backup_config": &client.BackupConfig{}, + "collection": &client.CollectionDescription{}, + "schema": &client.SchemaDescription{}, + "collection_definition": &client.CollectionDefinition{}, + "index": &client.IndexDescription{}, + "delete_result": &client.DeleteResult{}, + "update_result": &client.UpdateResult{}, + "lens_config": &client.LensConfig{}, + "replicator": &client.Replicator{}, + "ccip_request": &CCIPRequest{}, + "ccip_response": &CCIPResponse{}, + "patch_schema_request": &patchSchemaRequest{}, + "add_view_request": &addViewRequest{}, } func NewOpenAPISpec() (*openapi3.T, error) { @@ -114,6 +116,10 @@ func NewOpenAPISpec() (*openapi3.T, error) { Name: "collection", Description: "Add, remove, or update documents", }, + &openapi3.Tag{ + Name: "view", + Description: "Add views", + }, &openapi3.Tag{ Name: "index", Description: "Add, update, or remove indexes", diff --git a/planner/datasource.go b/planner/datasource.go index 72ac7579b4..6cfb8cf728 100644 --- a/planner/datasource.go +++ b/planner/datasource.go @@ -31,13 +31,23 @@ func (p *Planner) getCollectionScanPlan(mapperSelect *mapper.Select) (planSource return planSource{}, err } - scan, err := p.Scan(mapperSelect, col.Description()) - if err != nil { - return planSource{}, err + var plan planNode + if col.Description().BaseQuery != nil { + var err error + plan, err = p.View(mapperSelect, col.Description()) + if err != nil { + return planSource{}, err + } + } else { + var err error + plan, err = p.Scan(mapperSelect, col.Description()) + if err != nil { + return planSource{}, err + } } return planSource{ - plan: scan, + plan: plan, collection: col, }, 
nil } diff --git a/planner/mapper/errors.go b/planner/mapper/errors.go index 552021ca94..b7477274a1 100644 --- a/planner/mapper/errors.go +++ b/planner/mapper/errors.go @@ -14,6 +14,7 @@ import "github.com/sourcenetwork/defradb/errors" const ( errInvalidFieldToGroupBy string = "invalid field value to groupBy" + errTypeNotFound string = "type not found" ) var ( @@ -27,3 +28,7 @@ var ( func NewErrInvalidFieldToGroupBy(field string) error { return errors.New(errInvalidFieldToGroupBy, errors.NewKV("Field", field)) } + +func NewErrTypeNotFound(name string) error { + return errors.New(errTypeNotFound, errors.NewKV("Type", name)) +} diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 418c0c5c57..3771cb5475 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -23,6 +23,12 @@ import ( "github.com/sourcenetwork/defradb/core" ) +const ( + // topLevelCollectionName is a dummy collection name to indicate that this item is at the outer most + // level of the query, typically an aggregate over an entire collection. + topLevelCollectionName string = "_topLevel" +) + var ( FilterEqOp = &Operator{Operation: "_eq"} ) @@ -52,12 +58,12 @@ func toSelect( return nil, err } - mapping, collection, err := getTopLevelInfo(ctx, store, selectRequest, collectionName) + mapping, schema, err := getTopLevelInfo(ctx, store, selectRequest, collectionName) if err != nil { return nil, err } - fields, aggregates, err := getRequestables(ctx, selectRequest, mapping, collection, store) + fields, aggregates, err := getRequestables(ctx, selectRequest, mapping, collectionName, store) if err != nil { return nil, err } @@ -84,7 +90,8 @@ func toSelect( aggregates, fields, mapping, - collection, + collectionName, + schema, store, ) @@ -92,8 +99,8 @@ func toSelect( return nil, err } - if collection != nil { - fields, err = resolveSecondaryRelationIDs(ctx, store, collection, mapping, fields) + if len(schema.Fields) != 0 { + fields, err = resolveSecondaryRelationIDs(ctx, store, collectionName, schema, mapping, fields) if err != nil { return nil, err } @@ -104,10 +111,7 @@ func toSelect( groupByFields := selectRequest.GroupBy.Value().Fields // Remap all alias field names to use their internal field name mappings. 
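// For example, a groupBy key naming a one-to-one related object such as `author`
// is remapped below to its internal related-object key (e.g. `author_id`, assuming
// `request.RelatedObjectID` is the `_id` suffix; field names are illustrative only).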
for index, groupByField := range groupByFields { - if collection == nil { - continue - } - fieldDesc, ok := collection.Schema().GetField(groupByField) + fieldDesc, ok := schema.GetField(groupByField) if ok && fieldDesc.IsObject() && !fieldDesc.IsObjectArray() { groupByFields[index] = groupByField + request.RelatedObjectID } else if ok && fieldDesc.IsObjectArray() { @@ -262,7 +266,8 @@ func resolveAggregates( aggregates []*aggregateRequest, inputFields []Requestable, mapping *core.DocumentMapping, - collection client.Collection, + collectionName string, + schema client.SchemaDescription, store client.Store, ) ([]Requestable, error) { fields := inputFields @@ -282,11 +287,7 @@ func resolveAggregates( var hasHost bool var convertedFilter *Filter if childIsMapped { - var fieldDesc client.FieldDescription - var isField bool - if collection != nil { - fieldDesc, isField = collection.Schema().GetField(target.hostExternalName) - } + fieldDesc, isField := schema.GetField(target.hostExternalName) if isField && !fieldDesc.IsObject() { var order *OrderBy @@ -339,9 +340,8 @@ func resolveAggregates( }, } - var collectionName string - if collection != nil { - collectionName = collection.Name() + if collectionName == topLevelCollectionName { + collectionName = "" } childCollectionName, err := getCollectionName(ctx, store, hostSelectRequest, collectionName) @@ -350,12 +350,12 @@ func resolveAggregates( } mapAggregateNestedTargets(target, hostSelectRequest, selectRequest.Root) - childMapping, childDesc, err := getTopLevelInfo(ctx, store, hostSelectRequest, childCollectionName) + childMapping, _, err := getTopLevelInfo(ctx, store, hostSelectRequest, childCollectionName) if err != nil { return nil, err } - childFields, _, err := getRequestables(ctx, hostSelectRequest, childMapping, childDesc, store) + childFields, _, err := getRequestables(ctx, hostSelectRequest, childMapping, childCollectionName, store) if err != nil { return nil, err } @@ -608,7 +608,7 @@ func getRequestables( ctx context.Context, selectRequest *request.Select, mapping *core.DocumentMapping, - collection client.Collection, + collectionName string, store client.Store, ) (fields []Requestable, aggregates []*aggregateRequest, err error) { for _, field := range selectRequest.Fields { @@ -630,12 +630,8 @@ func getRequestables( }) case *request.Select: index := mapping.GetNextIndex() - var parentCollectionName string - if collection != nil { - parentCollectionName = collection.Name() - } - innerSelect, err := toSelect(ctx, store, index, f, parentCollectionName) + innerSelect, err := toSelect(ctx, store, index, f, collectionName) if err != nil { return nil, nil, err } @@ -705,8 +701,7 @@ func getCollectionName( parentCollectionName string, ) (string, error) { if _, isAggregate := request.Aggregates[selectRequest.Name]; isAggregate { - // This string is not used or referenced, its value is only there to aid debugging - return "_topLevel", nil + return topLevelCollectionName, nil } if selectRequest.Name == request.GroupFieldName { @@ -738,25 +733,41 @@ func getTopLevelInfo( store client.Store, selectRequest *request.Select, collectionName string, -) (*core.DocumentMapping, client.Collection, error) { +) (*core.DocumentMapping, client.SchemaDescription, error) { mapping := core.NewDocumentMapping() if _, isAggregate := request.Aggregates[selectRequest.Name]; isAggregate { // If this is a (top-level) aggregate, then it will have no collection // description, and no top-level fields, so we return an empty mapping only - return mapping, nil, nil + 
return mapping, client.SchemaDescription{}, nil } if selectRequest.Root == request.ObjectSelection { - mapping.Add(core.DocKeyFieldIndex, request.KeyFieldName) - + var schema client.SchemaDescription collection, err := store.GetCollectionByName(ctx, collectionName) if err != nil { - return nil, nil, err + // If the collection is not found, check to see if a schema of that name exists, + // if so, this must be an embedded object. + // + // Note: This is a poor way to check if a collection exists or not, see + // https://github.com/sourcenetwork/defradb/issues/2146 + schemas, err := store.GetSchemasByName(ctx, collectionName) + if err != nil { + return nil, client.SchemaDescription{}, err + } + if len(schemas) == 0 { + return nil, client.SchemaDescription{}, NewErrTypeNotFound(collectionName) + } + // `schemas` will contain all versions of that name, as views cannot be updated atm this should + // be fine for now + schema = schemas[0] + } else { + mapping.Add(core.DocKeyFieldIndex, request.KeyFieldName) + schema = collection.Schema() } // Map all fields from schema into the map as they are fetched automatically - for _, f := range collection.Schema().Fields { + for _, f := range schema.Fields { if f.IsObject() { // Objects are skipped, as they are not fetched by default and // have to be requested via selects. @@ -771,7 +782,7 @@ func getTopLevelInfo( mapping.Add(mapping.GetNextIndex(), request.DeletedFieldName) - return mapping, collection, nil + return mapping, schema, nil } if selectRequest.Name == request.LinksFieldName { @@ -792,7 +803,7 @@ func getTopLevelInfo( mapping.SetTypeName(request.CommitTypeName) } - return mapping, nil, nil + return mapping, client.SchemaDescription{}, nil } func resolveFilterDependencies( @@ -989,7 +1000,8 @@ func constructEmptyJoin( func resolveSecondaryRelationIDs( ctx context.Context, store client.Store, - collection client.Collection, + collectionName string, + schema client.SchemaDescription, mapping *core.DocumentMapping, requestables []Requestable, ) ([]Requestable, error) { @@ -1001,7 +1013,7 @@ func resolveSecondaryRelationIDs( continue } - fieldDesc, descFound := collection.Schema().GetField(existingField.Name) + fieldDesc, descFound := schema.GetField(existingField.Name) if !descFound { continue } @@ -1010,39 +1022,14 @@ func resolveSecondaryRelationIDs( continue } - objectFieldDesc, descFound := collection.Schema().GetField( - strings.TrimSuffix(existingField.Name, request.RelatedObjectID), - ) - if !descFound { - continue - } - - if objectFieldDesc.RelationName == "" { - continue - } + objectFieldName := strings.TrimSuffix(existingField.Name, request.RelatedObjectID) var siblingFound bool for _, siblingRequestable := range requestables { - siblingSelect, isSelect := siblingRequestable.(*Select) - if !isSelect { - continue - } - - siblingFieldDesc, descFound := collection.Schema().GetField(siblingSelect.Field.Name) - if !descFound { - continue - } - - if siblingFieldDesc.RelationName != objectFieldDesc.RelationName { - continue + if siblingRequestable.GetName() == objectFieldName { + siblingFound = true + break } - - if siblingFieldDesc.Kind != client.FieldKind_FOREIGN_OBJECT { - continue - } - - siblingFound = true - break } if !siblingFound { @@ -1052,7 +1039,7 @@ func resolveSecondaryRelationIDs( join, err := constructEmptyJoin( ctx, store, - collection.Name(), + collectionName, mapping, objectFieldName, ) diff --git a/planner/operations.go b/planner/operations.go index f174bb6d45..75d70dcdaf 100644 --- a/planner/operations.go +++ 
b/planner/operations.go @@ -32,6 +32,7 @@ var ( _ planNode = (*typeJoinOne)(nil) _ planNode = (*updateNode)(nil) _ planNode = (*valuesNode)(nil) + _ planNode = (*viewNode)(nil) _ MultiNode = (*parallelNode)(nil) _ MultiNode = (*topLevelNode)(nil) diff --git a/planner/planner.go b/planner/planner.go index b066e1f0e3..5a87983947 100644 --- a/planner/planner.go +++ b/planner/planner.go @@ -237,6 +237,9 @@ func (p *Planner) expandPlan(planNode planNode, parentPlan *selectTopNode) error case *deleteNode: return p.expandPlan(n.source, parentPlan) + case *viewNode: + return p.expandPlan(n.source, parentPlan) + default: return nil } diff --git a/planner/select.go b/planner/select.go index 20c0dd43ba..11b2ef510b 100644 --- a/planner/select.go +++ b/planner/select.go @@ -373,7 +373,8 @@ func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, erro // commit query link fields are always added and need no special treatment here // WARNING: It is important to check collection name is nil and the parent select name // here else we risk falsely identifying user defined fields with the name `links` as a commit links field - } else { + } else if n.collection.Description().BaseQuery == nil { + // Views only contain embedded objects and don't require a traditional join here err := n.addTypeIndexJoin(f) if err != nil { return nil, err diff --git a/planner/view.go b/planner/view.go new file mode 100644 index 0000000000..7050469ffd --- /dev/null +++ b/planner/view.go @@ -0,0 +1,95 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package planner + +import ( + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/planner/mapper" +) + +// viewNode processes queries to a Defra View constructed from a base query ahead of time. +type viewNode struct { + docMapper + + p *Planner + desc client.CollectionDescription + source planNode +} + +func (p *Planner) View(query *mapper.Select, desc client.CollectionDescription) (*viewNode, error) { + m, err := mapper.ToSelect(p.ctx, p.db, desc.BaseQuery) + if err != nil { + return nil, err + } + + source, err := p.Select(m) + if err != nil { + return nil, err + } + + return &viewNode{ + p: p, + desc: desc, + source: source, + docMapper: docMapper{query.DocumentMapping}, + }, nil +} + +func (n *viewNode) Init() error { + return n.source.Init() +} + +func (n *viewNode) Start() error { + return n.source.Start() +} + +func (n *viewNode) Spans(spans core.Spans) { + n.source.Spans(spans) +} + +func (n *viewNode) Next() (bool, error) { + return n.source.Next() +} + +func (n *viewNode) Value() core.Doc { + sourceValue := n.source.DocumentMap().ToMap(n.source.Value()) + + // We must convert the document from the source mapping (which was constructed using the + // view's base query) to a document using the output mapping (which was constructed using + // the current query and the output schemas). We do this by source output name, which + // will take into account any aliases defined in the base query. 
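+ // For example, if the base query selected `fullName` under the alias `name`, the
+ // source document exposes the value as `name` and it is copied into the output
+ // document under that same name (alias and field here are illustrative only).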
+	doc := n.docMapper.documentMapping.NewDoc()
+	for fieldName, fieldValue := range sourceValue {
+		n.docMapper.documentMapping.SetFirstOfName(&doc, fieldName, fieldValue)
+	}
+
+	return doc
+}
+
+func (n *viewNode) Source() planNode {
+	return n.source
+}
+
+func (n *viewNode) Kind() string {
+	return "viewNode"
+}
+
+func (n *viewNode) Close() error {
+	if n.source != nil {
+		err := n.source.Close()
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go
index fd4c354a32..85f401fd35 100644
--- a/request/graphql/schema/collection.go
+++ b/request/graphql/schema/collection.go
@@ -56,13 +56,27 @@ func fromAst(ctx context.Context, doc *ast.Document) (
 	for _, def := range doc.Definitions {
 		switch defType := def.(type) {
 		case *ast.ObjectDefinition:
-			description, err := fromAstDefinition(ctx, relationManager, defType)
+			description, err := collectionFromAstDefinition(ctx, relationManager, defType)
 			if err != nil {
 				return nil, err
 			}
 
 			definitions = append(definitions, description)
 
+		case *ast.InterfaceDefinition:
+			description, err := schemaFromAstDefinition(ctx, relationManager, defType)
+			if err != nil {
+				return nil, err
+			}
+
+			definitions = append(
+				definitions,
+				client.CollectionDefinition{
+					// `Collection` is left as default, as interfaces are schema-only declarations
+					Schema: description,
+				},
+			)
+
 		default:
 			// Do nothing, ignore it and continue
 			continue
@@ -80,8 +94,8 @@ func fromAst(ctx context.Context, doc *ast.Document) (
 	return definitions, nil
 }
 
-// fromAstDefinition parses a AST object definition into a set of collection descriptions.
-func fromAstDefinition(
+// collectionFromAstDefinition parses an AST object definition into a set of collection descriptions.
+func collectionFromAstDefinition(
 	ctx context.Context,
 	relationManager *RelationManager,
 	def *ast.ObjectDefinition,
@@ -96,7 +110,7 @@ func fromAstDefinition(
 	indexDescriptions := []client.IndexDescription{}
 
 	for _, field := range def.Fields {
-		tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def)
+		tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value)
 		if err != nil {
 			return client.CollectionDefinition{}, err
 		}
@@ -147,6 +161,33 @@ func fromAstDefinition(
 	}, nil
 }
 
+func schemaFromAstDefinition(
+	ctx context.Context,
+	relationManager *RelationManager,
+	def *ast.InterfaceDefinition,
+) (client.SchemaDescription, error) {
+	fieldDescriptions := []client.FieldDescription{}
+
+	for _, field := range def.Fields {
+		tmpFieldsDescriptions, err := fieldsFromAST(field, relationManager, def.Name.Value)
+		if err != nil {
+			return client.SchemaDescription{}, err
+		}
+
+		fieldDescriptions = append(fieldDescriptions, tmpFieldsDescriptions...)
+	}
+
+	// sort the fields lexicographically
+	sort.Slice(fieldDescriptions, func(i, j int) bool {
+		return fieldDescriptions[i].Name < fieldDescriptions[j].Name
+	})
+
+	return client.SchemaDescription{
+		Name:   def.Name.Value,
+		Fields: fieldDescriptions,
+	}, nil
+}
+
 // IsValidIndexName returns true if the name is a valid index name.
 // Valid index names must start with a letter or underscore, and can
 // contain letters, numbers, and underscores.
@@ -271,7 +312,7 @@ func indexFromAST(directive *ast.Directive) (client.IndexDescription, error) {
 
 func fieldsFromAST(field *ast.FieldDefinition,
 	relationManager *RelationManager,
-	def *ast.ObjectDefinition,
+	hostObjectName string,
 ) ([]client.FieldDescription, error) {
 	kind, err := astTypeToKind(field.Type)
 	if err != nil {
@@ -304,7 +345,7 @@ func fieldsFromAST(field *ast.FieldDefinition,
 				relationType = client.Relation_Type_MANY
 			}
 
-			relationName, err = getRelationshipName(field, def.Name.Value, schema)
+			relationName, err = getRelationshipName(field, hostObjectName, schema)
 			if err != nil {
 				return nil, err
 			}
@@ -442,6 +483,13 @@ func getRelationshipName(
 }
 
 func finalizeRelations(relationManager *RelationManager, definitions []client.CollectionDefinition) error {
+	embeddedObjNames := map[string]struct{}{}
+	for _, def := range definitions {
+		if def.Description.Name == "" {
+			embeddedObjNames[def.Schema.Name] = struct{}{}
+		}
+	}
+
 	for _, definition := range definitions {
 		for i, field := range definition.Schema.Fields {
 			if field.RelationType == 0 || field.RelationType&client.Relation_Type_INTERNAL_ID != 0 {
@@ -459,7 +507,8 @@ func finalizeRelations(relationManager *RelationManager, definitions []client.Co
 			}
 
 			// if not finalized then we are missing one side of the relationship
-			if !rel.finalized {
+			// unless this is an embedded object, which only has single-sided relations
+			if _, ok := embeddedObjNames[field.Schema]; !ok && !rel.finalized {
 				return client.NewErrRelationOneSided(field.Name, field.Schema)
 			}
 
diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go
index 0d1f40efe1..f76c5623c6 100644
--- a/request/graphql/schema/generate.go
+++ b/request/graphql/schema/generate.go
@@ -98,8 +98,24 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio
 		if err != nil {
 			return nil, err
 		}
-		queryType.AddFieldConfig(f.Name, f)
 		generatedQueryFields = append(generatedQueryFields, f)
+
+		var isEmbedded bool
+		for _, definition := range collections {
+			if t.Name() == definition.Schema.Name && definition.Description.Name == "" {
+				isEmbedded = true
+				break
+			}
+		}
+
+		// If the object is embedded, it may not be queried directly, so we must not add it
+		// to the `query` object. We do however need the query-input objects to be generated
+		// (further up in this block), as they are still required for stuff like grouping.
+		if isEmbedded {
+			continue
+		}
+
+		queryType.AddFieldConfig(f.Name, f)
 	}
 
 	// resolve types
@@ -167,6 +183,34 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio
 	// now let's generate the mutation types.
 	mutationType := g.manager.schema.MutationType()
 	for _, t := range g.typeDefs {
+		// Note: Whilst the `isReadOnly` code is fairly unpleasant, it will hopefully not live for too much longer
+		// as we plan to transition to DQL.
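+		//
+		// In short: a collection whose description carries a non-nil BaseQuery is a view,
+		// and an entry that has no collection description at all is an embedded object;
+		// both are treated as read-only by the checks below.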
+		var isReadOnly bool
+		var collectionFound bool
+		for _, definition := range collections {
+			if t.Name() == definition.Description.Name {
+				isReadOnly = definition.Description.BaseQuery != nil
+				collectionFound = true
+				break
+			}
+		}
+		if !collectionFound {
+			// If we did not find a collection with this name, check for matching schemas (embedded objects)
+			for _, definition := range collections {
+				if t.Name() == definition.Schema.Name {
+					// All embedded objects are readonly
+					isReadOnly = true
+					collectionFound = true
+					break
+				}
+			}
+		}
+
+		if isReadOnly {
+			// We do not currently allow mutation via views, so don't add them to the mutation object
+			continue
+		}
+
 		fs, err := g.GenerateMutationInputForGQLType(t)
 		if err != nil {
 			return nil, err
 		}
@@ -365,14 +409,23 @@ func (g *Generator) buildTypes(
 		// will be reassigned before the thunk is run
 		collection := c
 		fieldDescriptions := collection.Schema.Fields
+		isEmbeddedObject := collection.Description.Name == ""
+
+		var objectName string
+		if isEmbeddedObject {
+			// If this is an embedded object, take the type name from the Schema
+			objectName = collection.Schema.Name
+		} else {
+			objectName = collection.Description.Name
+		}
 
 		// check if type exists
-		if _, ok := g.manager.schema.TypeMap()[collection.Description.Name]; ok {
-			return nil, NewErrSchemaTypeAlreadyExist(collection.Description.Name)
+		if _, ok := g.manager.schema.TypeMap()[objectName]; ok {
+			return nil, NewErrSchemaTypeAlreadyExist(objectName)
 		}
 
 		objconf := gql.ObjectConfig{
-			Name: collection.Description.Name,
+			Name: objectName,
 		}
 
 		// Wrap field definition in a thunk so we can
@@ -381,10 +434,12 @@ func (g *Generator) buildTypes(
 		fieldsThunk := (gql.FieldsThunk)(func() (gql.Fields, error) {
 			fields := gql.Fields{}
 
-			// automatically add the _key: ID field to the type
-			fields[request.KeyFieldName] = &gql.Field{
-				Description: keyFieldDescription,
-				Type:        gql.ID,
+			if !isEmbeddedObject {
+				// automatically add the _key: ID field to the type
+				fields[request.KeyFieldName] = &gql.Field{
+					Description: keyFieldDescription,
+					Type:        gql.ID,
+				}
 			}
 
 			for _, field := range fieldDescriptions {
@@ -423,21 +478,9 @@ func (g *Generator) buildTypes(
 				}
 			}
 
-			// add _version field
-			fields[request.VersionFieldName] = &gql.Field{
-				Description: versionFieldDescription,
-				Type:        gql.NewList(schemaTypes.CommitObject),
-			}
-
-			// add _deleted field
-			fields[request.DeletedFieldName] = &gql.Field{
-				Description: deletedFieldDescription,
-				Type:        gql.Boolean,
-			}
-
-			gqlType, ok := g.manager.schema.TypeMap()[collection.Description.Name]
+			gqlType, ok := g.manager.schema.TypeMap()[objectName]
 			if !ok {
-				return nil, NewErrObjectNotFoundDuringThunk(collection.Description.Name)
+				return nil, NewErrObjectNotFoundDuringThunk(objectName)
 			}
 
 			fields[request.GroupFieldName] = &gql.Field{
@@ -445,6 +488,20 @@ func (g *Generator) buildTypes(
 				Type:        gql.NewList(gqlType),
 			}
 
+			if !isEmbeddedObject {
+				// add _version field
+				fields[request.VersionFieldName] = &gql.Field{
+					Description: versionFieldDescription,
+					Type:        gql.NewList(schemaTypes.CommitObject),
+				}
+
+				// add _deleted field
+				fields[request.DeletedFieldName] = &gql.Field{
+					Description: deletedFieldDescription,
+					Type:        gql.Boolean,
+				}
+			}
+
 			return fields, nil
 		})
diff --git a/tests/clients/cli/wrapper.go b/tests/clients/cli/wrapper.go
index 43c0aba820..49a0605598 100644
--- a/tests/clients/cli/wrapper.go
+++ b/tests/clients/cli/wrapper.go
@@ -202,6 +202,22 @@ func (w *Wrapper) SetDefaultSchemaVersion(ctx context.Context, schemaVersionID s
 	return err
 }
 
+func (w *Wrapper) AddView(ctx context.Context, query string, sdl string) ([]client.CollectionDefinition, error) {
+	args := []string{"client", "view", "add"}
+	args = append(args, query)
+	args = append(args, sdl)
+
+	data, err := w.cmd.execute(ctx, args)
+	if err != nil {
+		return nil, err
+	}
+	var defs []client.CollectionDefinition
+	if err := json.Unmarshal(data, &defs); err != nil {
+		return nil, err
+	}
+	return defs, nil
+}
+
 func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error {
 	return w.LensRegistry().SetMigration(ctx, config)
 }
diff --git a/tests/clients/http/wrapper.go b/tests/clients/http/wrapper.go
index ab7975a525..040ab9c1b4 100644
--- a/tests/clients/http/wrapper.go
+++ b/tests/clients/http/wrapper.go
@@ -103,6 +103,10 @@ func (w *Wrapper) SetDefaultSchemaVersion(ctx context.Context, schemaVersionID s
 	return w.client.SetDefaultSchemaVersion(ctx, schemaVersionID)
 }
 
+func (w *Wrapper) AddView(ctx context.Context, query string, sdl string) ([]client.CollectionDefinition, error) {
+	return w.client.AddView(ctx, query, sdl)
+}
+
 func (w *Wrapper) SetMigration(ctx context.Context, config client.LensConfig) error {
 	return w.client.SetMigration(ctx, config)
 }
diff --git a/tests/integration/explain.go b/tests/integration/explain.go
index da7a1106e2..a8de0e6441 100644
--- a/tests/integration/explain.go
+++ b/tests/integration/explain.go
@@ -55,6 +55,7 @@ var (
 		"typeJoinOne": {},
 		"updateNode":  {},
 		"valuesNode":  {},
+		"viewNode":    {},
 	}
 )
diff --git a/tests/integration/explain/debug/with_view_test.go b/tests/integration/explain/debug/with_view_test.go
new file mode 100644
index 0000000000..44341ae521
--- /dev/null
+++ b/tests/integration/explain/debug/with_view_test.go
@@ -0,0 +1,71 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package test_explain_debug
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+var viewPattern = dataMap{
+	"explain": dataMap{
+		"selectTopNode": dataMap{
+			"selectNode": dataMap{
+				"viewNode": dataMap{
+					"selectTopNode": dataMap{
+						"selectNode": dataMap{
+							"scanNode": dataMap{},
+						},
+					},
+				},
+			},
+		},
+	},
+}
+
+func TestDebugExplainRequestWithView(t *testing.T) {
+	test := testUtils.TestCase{
+
+		Description: "Explain (debug) request with view",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type User {
+						name: String
+					}
+				`,
+			},
+			testUtils.CreateView{
+				Query: `
+					User {
+						name
+					}
+				`,
+				SDL: `
+					type UserView {
+						name: String
+					}
+				`,
+			},
+			testUtils.ExplainRequest{
+				Request: `query @explain(type: debug) {
+					UserView {
+						name
+					}
+				}`,
+				ExpectedPatterns: []dataMap{viewPattern},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go
index 5b3a75de80..96a3b98a56 100644
--- a/tests/integration/schema/default_fields.go
+++ b/tests/integration/schema/default_fields.go
@@ -66,6 +66,15 @@ var DefaultFields = concat(
 	aggregateFields,
 )
 
+// DefaultEmbeddedObjFields contains the list of fields every
+// defra embedded-object should have.
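+//
+// Unlike full collections, embedded objects carry no `_key`, `_version`, or
+// `_deleted` fields; only `_group` and the aggregate fields are generated for them.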
+var DefaultEmbeddedObjFields = concat(
+	fields{
+		groupField,
+	},
+	aggregateFields,
+)
+
 var keyField = Field{
 	"name": "_key",
 	"type": map[string]any{
diff --git a/tests/integration/test_case.go b/tests/integration/test_case.go
index ecb00e602e..435f1cf9b4 100644
--- a/tests/integration/test_case.go
+++ b/tests/integration/test_case.go
@@ -130,6 +130,26 @@ type SetDefaultSchemaVersion struct {
 	ExpectedError string
 }
 
+// CreateView is an action that will create a new View.
+type CreateView struct {
+	// NodeID may hold the ID (index) of a node to create this View on.
+	//
+	// If a value is not provided, the view will be created on all nodes.
+	NodeID immutable.Option[int]
+
+	// The query that this View is to be based off of. Required.
+	Query string
+
+	// The SDL containing all types used by the view output.
+	SDL string
+
+	// Any error expected from the action. Optional.
+	//
+	// String can be a partial, and the test will pass if an error is returned that
+	// contains this string.
+	ExpectedError string
+}
+
 // CreateDoc will attempt to create the given document in the given collection
 // using the set [MutationType].
 type CreateDoc struct {
diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go
index 87d8487ccc..2ae73ddeca 100644
--- a/tests/integration/utils2.go
+++ b/tests/integration/utils2.go
@@ -263,6 +263,9 @@ func performAction(
 	case SetDefaultSchemaVersion:
 		setDefaultSchemaVersion(s, action)
 
+	case CreateView:
+		createView(s, action)
+
 	case ConfigureMigration:
 		configureMigration(s, action)
 
@@ -1052,6 +1055,18 @@ func setDefaultSchemaVersion(
 	refreshIndexes(s)
}
 
+func createView(
+	s *state,
+	action CreateView,
+) {
+	for _, node := range getNodes(action.NodeID, s.nodes) {
+		_, err := node.AddView(s.ctx, action.Query, action.SDL)
+		expectedErrorRaised := AssertError(s.t, s.testCase.Description, err, action.ExpectedError)
+
+		assertExpectedErrorRaised(s.t, s.testCase.Description, action.ExpectedError, expectedErrorRaised)
+	}
+}
+
 // createDoc creates a document using the chosen [mutationType] and caches it in the
 // test state object.
 func createDoc(
diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go
new file mode 100644
index 0000000000..02bb7cb8a5
--- /dev/null
+++ b/tests/integration/view/one_to_many/simple_test.go
@@ -0,0 +1,241 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+ +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_OneToMany(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: `query { + AuthorView { + name + books { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Harper Lee", + "books": []map[string]any{ + { + "name": "To Kill a Mockingbird", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_OneToManyWithMixedSDL_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with mixed sdl errors", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [Book] + } + `, + ExpectedError: "relation must be defined on both schemas. Field: books, Type: Book", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_OneToManyFromInnerSide_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view from inner side", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + testUtils.Request{ + Request: `query { + BookView { + name + author { + name + } + } + }`, + ExpectedError: `Cannot query field "BookView" on type "Query".`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_OneToManyOuterToInnerToOuter_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view from outer to inner to outer", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + author { + name + } + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + testUtils.Request{ + Request: `query { + AuthorView { + name + books { + name + author { + name + } + } + } + }`, + ExpectedError: `Cannot query field "author" on type "BookView".`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_many/with_alias_test.go b/tests/integration/view/one_to_many/with_alias_test.go new file mode 100644 index 0000000000..be2d4a8f30 --- /dev/null +++ 
b/tests/integration/view/one_to_many/with_alias_test.go @@ -0,0 +1,169 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_OneToManyWithAliasOnOuter(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with alias on outer object", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + fullName: name + books { + name + } + } + `, + SDL: ` + type AuthorView { + fullName: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: `query { + AuthorView { + fullName + books { + name + } + } + }`, + Results: []map[string]any{ + { + "fullName": "Harper Lee", + "books": []map[string]any{ + { + "name": "To Kill a Mockingbird", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_OneToManyWithAliasOnInner(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with alias on inner object", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + fullName: name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + fullName: String + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: ` + query { + AuthorView { + name + books { + fullName + } + } + } + `, + Results: []map[string]any{ + { + "name": "Harper Lee", + "books": []map[string]any{ + { + "fullName": "To Kill a Mockingbird", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_many/with_count_test.go b/tests/integration/view/one_to_many/with_count_test.go new file mode 100644 index 0000000000..ffc6b4cacd --- /dev/null +++ b/tests/integration/view/one_to_many/with_count_test.go @@ -0,0 +1,158 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +// Note: This test partially documents: +// https://github.com/sourcenetwork/defradb/issues/2113 +func TestView_OneToManyWithCount_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with count", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + _count(books: {}) + } + `, + SDL: ` + type AuthorView { + name: String + _count: Int + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Go Set a Watchman", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: `query { + AuthorView { + name + _count + } + }`, + ExpectedError: "aggregate must be provided with a property to aggregate", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_OneToManyWithAliasedCount(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with aliased count", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + numberOfBooks: _count(books: {}) + } + `, + SDL: ` + type AuthorView { + name: String + numberOfBooks: Int + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Go Set a Watchman", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: ` + query { + AuthorView { + name + numberOfBooks + } + } + `, + Results: []map[string]any{ + { + "name": "Harper Lee", + "numberOfBooks": 2, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_many/with_introspection_test.go b/tests/integration/view/one_to_many/with_introspection_test.go new file mode 100644 index 0000000000..284bd059af --- /dev/null +++ b/tests/integration/view/one_to_many/with_introspection_test.go @@ -0,0 +1,131 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestView_OneToMany_GQLIntrospectionTest(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view, introspection test", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "AuthorView") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "AuthorView", + "fields": schema.DefaultFields.Append( + schema.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Append( + schema.Field{ + "name": "books", + "type": map[string]any{ + "kind": "LIST", + "name": nil, + }, + }, + ).Tidy(), + }, + }, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "BookView") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "BookView", + // Note: `_key`, `_version`, `_deleted`, etc should not be present, + // although aggregates and `_group` should be. + // There should also be no `Author` field - the relationship field + // should only exist on the parent. + "fields": schema.DefaultEmbeddedObjFields.Append( + schema.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_one/identical_schema_test.go b/tests/integration/view/one_to_one/identical_schema_test.go new file mode 100644 index 0000000000..fb82303134 --- /dev/null +++ b/tests/integration/view/one_to_one/identical_schema_test.go @@ -0,0 +1,94 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package one_to_many + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_OneToOneSameSchema(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to one view with same schema", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type LeftHand { + name: String + holding: RightHand @primary @relation(name: "left_right") + heldBy: RightHand @relation(name: "right_left") + } + type RightHand { + name: String + holding: LeftHand @primary @relation(name: "right_left") + heldBy: LeftHand @relation(name: "left_right") + } + `, + }, + testUtils.CreateView{ + Query: ` + LeftHand { + name + heldBy { + name + } + } + `, + // todo - such a setup appears to work, yet prevents the querying of `RightHand`s as the primary return object + // thought - although, perhaps if the view is defined as such, Left and right hands *could* be merged by us into a single table + SDL: ` + type HandView { + name: String + holding: HandView @primary + heldBy: HandView + } + `, + }, + // bae-f3db7a4d-3db1-5d57-9996-32c3fdff99d3 + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Left hand 1" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "Right hand 1", + "holding_id": "bae-f3db7a4d-3db1-5d57-9996-32c3fdff99d3" + }`, + }, + testUtils.Request{ + Request: ` + query { + HandView { + name + heldBy { + name + } + } + } + `, + Results: []map[string]any{ + { + "name": "Left hand 1", + "heldBy": map[string]any{ + "name": "Right hand 1", + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/simple/simple_test.go b/tests/integration/view/simple/simple_test.go new file mode 100644 index 0000000000..0e5aa0f4a7 --- /dev/null +++ b/tests/integration/view/simple/simple_test.go @@ -0,0 +1,162 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_Simple(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + UserView { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_SimpleMultipleDocs(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view, multiple docs", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred" + }`, + }, + testUtils.Request{ + Request: `query { + UserView { + name + } + }`, + Results: []map[string]any{ + { + "name": "Fred", + }, + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_SimpleWithFieldSubset_ErrorsSelectingExcludedField(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with field subset errors selecting excluded field", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView { + age + } + } + `, + ExpectedError: `Cannot query field "age" on type "UserView"`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/simple/with_alias_test.go b/tests/integration/view/simple/with_alias_test.go new file mode 100644 index 0000000000..0fd7e29db9 --- /dev/null +++ b/tests/integration/view/simple/with_alias_test.go @@ -0,0 +1,65 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_SimpleWithAlias(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with alias", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + fullname: name + } + `, + SDL: ` + type UserView { + fullname: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView { + fullname + } + } + `, + Results: []map[string]any{ + { + "fullname": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/simple/with_filter_test.go b/tests/integration/view/simple/with_filter_test.go new file mode 100644 index 0000000000..07b0e130ed --- /dev/null +++ b/tests/integration/view/simple/with_filter_test.go @@ -0,0 +1,135 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestView_SimpleWithFilter(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User(filter: {name: {_eq: "John"}}) { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView { + name + } + } + `, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_SimpleWithFilterOnViewAndQuery(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with filter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateView{ + Query: ` + User(filter: {name: {_eq: "John"}}) { + name + age + } + `, + SDL: ` + type UserView { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 30 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "age": 31 + }`, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "Fred", + "age": 31 + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView(filter: {age: {_eq: 31}}) { + name + } + } + `, + Results: []map[string]any{ + { + "name": "John", + "age": 31, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/simple/with_introspection_test.go b/tests/integration/view/simple/with_introspection_test.go new file mode 100644 index 0000000000..ada7d2cfcd --- /dev/null +++ b/tests/integration/view/simple/with_introspection_test.go @@ -0,0 +1,77 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. 
+// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package simple + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" + "github.com/sourcenetwork/defradb/tests/integration/schema" +) + +func TestView_Simple_GQLIntrospectionTest(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.IntrospectionRequest{ + Request: ` + query { + __type (name: "UserView") { + name + fields { + name + type { + name + kind + } + } + } + } + `, + ExpectedData: map[string]any{ + "__type": map[string]any{ + "name": "UserView", + "fields": schema.DefaultFields.Append( + schema.Field{ + "name": "name", + "type": map[string]any{ + "kind": "SCALAR", + "name": "String", + }, + }, + ).Tidy(), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From c2f928db3ab7cf9b7c41944a1bda29327d225718 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 09:09:45 -0800 Subject: [PATCH 30/60] bot: Update dependencies (bulk dependabot PRs) 27-12-2023 (#2154) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2153 bot: Bump google.golang.org/protobuf from 1.31.0 to 1.32.0 #2151 bot: Bump github.com/go-chi/chi/v5 from 5.0.10 to 5.0.11 #2150 bot: Bump @typescript-eslint/parser from 6.14.0 to 6.15.0 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #2152 bot: Bump google.golang.org/grpc from 1.60.0 to 1.60.1 #2149 bot: Bump @typescript-eslint/eslint-plugin from 6.14.0 to 6.15.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 6 +- go.sum | 12 +- playground/package-lock.json | 306 +++++++++++++++++++++++++++++++---- playground/package.json | 4 +- 4 files changed, 290 insertions(+), 38 deletions(-) diff --git a/go.mod b/go.mod index 479a7bdfad..74a2397bcc 100644 --- a/go.mod +++ b/go.mod @@ -8,7 +8,7 @@ require ( github.com/evanphx/json-patch/v5 v5.7.0 github.com/fxamacker/cbor/v2 v2.5.0 github.com/getkin/kin-openapi v0.120.0 - github.com/go-chi/chi/v5 v5.0.10 + github.com/go-chi/chi/v5 v5.0.11 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.5.1 github.com/gofrs/uuid/v5 v5.0.0 @@ -49,8 +49,8 @@ require ( golang.org/x/crypto v0.17.0 golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa golang.org/x/net v0.19.0 - google.golang.org/grpc v1.60.0 - google.golang.org/protobuf v1.31.0 + google.golang.org/protobuf v1.32.0 + google.golang.org/grpc v1.60.1 ) require ( diff --git a/go.sum b/go.sum index 67c6de1d4f..2186809d03 100644 --- a/go.sum +++ b/go.sum @@ -98,8 +98,8 @@ github.com/getkin/kin-openapi v0.120.0 h1:MqJcNJFrMDFNc07iwE8iFC5eT2k/NPUFDIpNei github.com/getkin/kin-openapi v0.120.0/go.mod h1:PCWw/lfBrJY4HcdqE3jj+QFkaFK8ABoqo7PvqVhXXqw= github.com/ghodss/yaml v1.0.0/go.mod 
h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= -github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= -github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-chi/chi/v5 v5.0.11 h1:BnpYbFZ3T3S1WMpD79r7R5ThWX40TaFB7L31Y8xqSwA= +github.com/go-chi/chi/v5 v5.0.11/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= github.com/go-chi/cors v1.2.1 h1:xEC8UT3Rlp2QuWNEr4Fs/c2EAGVKBwy/1vHx3bppil4= github.com/go-chi/cors v1.2.1/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= @@ -728,8 +728,8 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.60.0 h1:6FQAR0kM31P6MRdeluor2w2gPaS4SVNrD/DNTxrQ15k= -google.golang.org/grpc v1.60.0/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= +google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU= +google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -741,8 +741,8 @@ google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpAD google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= -google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I= +google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= diff --git a/playground/package-lock.json b/playground/package-lock.json index 92af0a2b90..ab31730f70 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,8 +18,8 @@ "@types/react": "^18.2.45", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.14.0", - "@typescript-eslint/parser": "^6.14.0", + "@typescript-eslint/parser": "^6.15.0", + "@typescript-eslint/eslint-plugin": "^6.15.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2294,16 +2294,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, 
"node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.14.0.tgz", - "integrity": "sha512-1ZJBykBCXaSHG94vMMKmiHoL0MhNHKSVlcHVYZNw+BKxufhqQVTOawNpwwI1P5nIFZ/4jLVop0mcY6mJJDFNaw==", + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.15.0.tgz", + "integrity": "sha512-j5qoikQqPccq9QoBAupOP+CBu8BaJ8BLjaXSioDISeTZkVO3ig7oSIKh3H+rEpee7xCXtWwSB4KIL5l6hWZzpg==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.14.0", - "@typescript-eslint/type-utils": "6.14.0", - "@typescript-eslint/utils": "6.14.0", - "@typescript-eslint/visitor-keys": "6.14.0", + "@typescript-eslint/scope-manager": "6.15.0", + "@typescript-eslint/type-utils": "6.15.0", + "@typescript-eslint/utils": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2328,16 +2328,63 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz", + "integrity": "sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", + "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", + "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/parser": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.14.0.tgz", - "integrity": "sha512-QjToC14CKacd4Pa7JK4GeB/vHmWFJckec49FR4hmIRf97+KXole0T97xxu9IFiPxVQ1DBWrQ5wreLwAGwWAVQA==", + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.15.0.tgz", + "integrity": "sha512-MkgKNnsjC6QwcMdlNAel24jjkEO/0hQaMDLqP4S9zq5HBAUJNQB6y+3DwLjX7b3l2b37eNAxMPLwb3/kh8VKdA==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.14.0", - "@typescript-eslint/types": "6.14.0", - "@typescript-eslint/typescript-estree": "6.14.0", - "@typescript-eslint/visitor-keys": "6.14.0", + "@typescript-eslint/scope-manager": "6.15.0", + 
"@typescript-eslint/types": "6.15.0", + "@typescript-eslint/typescript-estree": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0", "debug": "^4.3.4" }, "engines": { @@ -2356,6 +2403,80 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz", + "integrity": "sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", + "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz", + "integrity": "sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", + "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz", @@ -2374,13 +2495,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.14.0.tgz", - "integrity": "sha512-x6OC9Q7HfYKqjnuNu5a7kffIYs3No30isapRBJl1iCHLitD8O0lFbRcVGiOcuyN837fqXzPZ1NS10maQzZMKqw==", + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.15.0.tgz", + "integrity": "sha512-CnmHKTfX6450Bo49hPg2OkIm/D/TVYV7jO1MCfPYGwf6x3GO0VU8YMO5AYMn+u3X05lRRxA4fWCz87GFQV6yVQ==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.14.0", - 
"@typescript-eslint/utils": "6.14.0", + "@typescript-eslint/typescript-estree": "6.15.0", + "@typescript-eslint/utils": "6.15.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2400,6 +2521,63 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", + "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz", + "integrity": "sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", + "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/types": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", @@ -2441,17 +2619,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.14.0.tgz", - "integrity": "sha512-XwRTnbvRr7Ey9a1NT6jqdKX8y/atWG+8fAIu3z73HSP8h06i3r/ClMhmaF/RGWGW1tHJEwij1uEg2GbEmPYvYg==", + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.15.0.tgz", + "integrity": "sha512-eF82p0Wrrlt8fQSRL0bGXzK5nWPRV2dYQZdajcfzOD9+cQz9O7ugifrJxclB+xVOvWvagXfqS4Es7vpLP4augw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.14.0", - "@typescript-eslint/types": "6.14.0", - "@typescript-eslint/typescript-estree": "6.14.0", + "@typescript-eslint/scope-manager": "6.15.0", + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/typescript-estree": "6.15.0", "semver": "^7.5.4" }, "engines": { @@ -2465,6 +2643,80 @@ "eslint": "^7.0.0 || ^8.0.0" } }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz", + "integrity": 
"sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", + "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz", + "integrity": "sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "@typescript-eslint/visitor-keys": "6.15.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", + "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.15.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/visitor-keys": { "version": "6.14.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", diff --git a/playground/package.json b/playground/package.json index 24268398fc..fd74466391 100644 --- a/playground/package.json +++ b/playground/package.json @@ -20,8 +20,8 @@ "@types/react": "^18.2.45", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.14.0", - "@typescript-eslint/parser": "^6.14.0", + "@typescript-eslint/parser": "^6.15.0", + "@typescript-eslint/eslint-plugin": "^6.15.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", From 31a4643cabc2efb823a64fdc44da580b4528a4d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 09:34:09 -0800 Subject: [PATCH 31/60] bot: Bump @typescript-eslint/eslint-plugin from 6.15.0 to 6.16.0 in /playground (#2155) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) from 6.15.0 to 
6.16.0.
Release notes

Sourced from @typescript-eslint/eslint-plugin's releases.

v6.16.0

6.16.0 (2023-12-25)

Bug Fixes

  • eslint-plugin: [unbound-method] exempt all non-Promise built-in statics (#8096) (3182959)

Features

  • eslint-plugin: deprecate formatting rules (#8073) (04dea84)
  • typescript-estree: add allowDefaultProjectForFiles project service allowlist option (#7752) (7ddadda)

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/eslint-plugin's changelog.

6.16.0 (2023-12-25)

Bug Fixes

  • eslint-plugin: [unbound-method] exempt all non-Promise built-in statics (#8096) (3182959)

Features

  • eslint-plugin: deprecate formatting (meta.type: layout) rules (#8073) (04dea84)
  • eslint-plugin: deprecate no-extra-semi in favor of ESLint Stylistic equivalent (#8123) (9368bf3)

You can read about our versioning strategy and releases on our website.

Commits
  • 7246e56 chore: publish v6.16.0
  • 3182959 fix(eslint-plugin): [unbound-method] exempt all non-Promise built-in statics ...
  • 3031117 docs(eslint-plugin): enforce a heading for each rule option (#8015)
  • b3f87fc docs(eslint-plugin): add Deprecating Formatting Rules blog post (#8117)
  • 9368bf3 feat(eslint-plugin): deprecate no-extra-semi in favor of ESLint Stylistic equ...
  • f6f6a57 docs: add tombstone file for no-parameter-properties and suggested replacemen...
  • 04dea84 feat(eslint-plugin): deprecate formatting (meta.type: layout) rules (#8073)
  • b1c92bb chore(website): auto-generate type checked rule notice in rule docs (#7951)
  • 26ba8ea docs: add more rationale for no-for-in-array (#8082)
  • eff7da1 docs: fix example for no-shadow (#8080)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/eslint-plugin&package-manager=npm_and_yarn&previous-version=6.15.0&new-version=6.16.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 255 +++++++---------------------------- playground/package.json | 2 +- 2 files changed, 52 insertions(+), 205 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index ab31730f70..1aa94f2685 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,8 +18,8 @@ "@types/react": "^18.2.45", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", + "@typescript-eslint/eslint-plugin": "^6.16.0", "@typescript-eslint/parser": "^6.15.0", - "@typescript-eslint/eslint-plugin": "^6.15.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2294,16 +2294,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.15.0.tgz", - "integrity": "sha512-j5qoikQqPccq9QoBAupOP+CBu8BaJ8BLjaXSioDISeTZkVO3ig7oSIKh3H+rEpee7xCXtWwSB4KIL5l6hWZzpg==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.16.0.tgz", + "integrity": "sha512-O5f7Kv5o4dLWQtPX4ywPPa+v9G+1q1x8mz0Kr0pXUtKsevo+gIJHLkGc8RxaZWtP8RrhwhSNIWThnW42K9/0rQ==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.15.0", - "@typescript-eslint/type-utils": "6.15.0", - "@typescript-eslint/utils": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0", + "@typescript-eslint/scope-manager": "6.16.0", + "@typescript-eslint/type-utils": "6.16.0", + "@typescript-eslint/utils": "6.16.0", + "@typescript-eslint/visitor-keys": "6.16.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2328,53 +2328,6 @@ } } }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz", - "integrity": "sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", - "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", - "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "eslint-visitor-keys": 
"^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/parser": { "version": "6.15.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.15.0.tgz", @@ -2478,13 +2431,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.14.0.tgz", - "integrity": "sha512-VT7CFWHbZipPncAZtuALr9y3EuzY1b1t1AEkIq2bTXUPKw+pHoXflGNG5L+Gv6nKul1cz1VH8fz16IThIU0tdg==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.16.0.tgz", + "integrity": "sha512-0N7Y9DSPdaBQ3sqSCwlrm9zJwkpOuc6HYm7LpzLAPqBL7dmzAUimr4M29dMkOP/tEwvOCC/Cxo//yOfJD3HUiw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.14.0", - "@typescript-eslint/visitor-keys": "6.14.0" + "@typescript-eslint/types": "6.16.0", + "@typescript-eslint/visitor-keys": "6.16.0" }, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2495,13 +2448,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.15.0.tgz", - "integrity": "sha512-CnmHKTfX6450Bo49hPg2OkIm/D/TVYV7jO1MCfPYGwf6x3GO0VU8YMO5AYMn+u3X05lRRxA4fWCz87GFQV6yVQ==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.16.0.tgz", + "integrity": "sha512-ThmrEOcARmOnoyQfYkHw/DX2SEYBalVECmoldVuH6qagKROp/jMnfXpAU/pAIWub9c4YTxga+XwgAkoA0pxfmg==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.15.0", - "@typescript-eslint/utils": "6.15.0", + "@typescript-eslint/typescript-estree": "6.16.0", + "@typescript-eslint/utils": "6.16.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2521,10 +2474,10 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", - "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", + "node_modules/@typescript-eslint/types": { + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.16.0.tgz", + "integrity": "sha512-hvDFpLEvTJoHutVl87+MG/c5C8I6LOgEx05zExTSJDEVU7hhR3jhV8M5zuggbdFCw98+HhZWPHZeKS97kS3JoQ==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2534,17 +2487,18 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz", - "integrity": "sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew==", + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.16.0.tgz", + "integrity": "sha512-VTWZuixh/vr7nih6CfrdpmFNLEnoVBF1skfjdyGnNwXOH1SLeHItGdZDHhhAIzd3ACazyY2Fg76zuzOVTaknGA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0", + "@typescript-eslint/types": "6.16.0", + "@typescript-eslint/visitor-keys": 
"6.16.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", + "minimatch": "9.0.3", "semver": "^7.5.4", "ts-api-utils": "^1.0.1" }, @@ -2561,75 +2515,42 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", - "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.15.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.14.0.tgz", - "integrity": "sha512-uty9H2K4Xs8E47z3SnXEPRNDfsis8JO27amp2GNCnzGETEW3yTqEIVg5+AI7U276oGF/tw6ZA+UesxeQ104ceA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" + "balanced-match": "^1.0.0" } }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.14.0.tgz", - "integrity": "sha512-yPkaLwK0yH2mZKFE/bXkPAkkFgOv15GJAUzgUVonAbv0Hr4PK/N2yaA/4XQbTZQdygiDkpt5DkxPELqHguNvyw==", + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.14.0", - "@typescript-eslint/visitor-keys": "6.14.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" + "brace-expansion": "^2.0.1" }, "engines": { - "node": "^16.0.0 || >=18.0.0" + "node": ">=16 || 14 >=14.17" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/@typescript-eslint/utils": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.15.0.tgz", - "integrity": "sha512-eF82p0Wrrlt8fQSRL0bGXzK5nWPRV2dYQZdajcfzOD9+cQz9O7ugifrJxclB+xVOvWvagXfqS4Es7vpLP4augw==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.16.0.tgz", + "integrity": "sha512-T83QPKrBm6n//q9mv7oiSvy/Xq/7Hyw9SzSEhMHJwznEmQayfBM87+oAlkNAMEO7/MjIwKyOHgBJbxB0s7gx2A==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.15.0", - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/typescript-estree": "6.15.0", + "@typescript-eslint/scope-manager": "6.16.0", + 
"@typescript-eslint/types": "6.16.0", + "@typescript-eslint/typescript-estree": "6.16.0", "semver": "^7.5.4" }, "engines": { @@ -2643,87 +2564,13 @@ "eslint": "^7.0.0 || ^8.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz", - "integrity": "sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", - "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz", - "integrity": "sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", - "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.14.0.tgz", - "integrity": "sha512-fB5cw6GRhJUz03MrROVuj5Zm/Q+XWlVdIsFj+Zb1Hvqouc8t+XP2H5y53QYU/MGtd2dPg6/vJJlhoX3xc2ehfw==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.16.0.tgz", + "integrity": "sha512-QSFQLruk7fhs91a/Ep/LqRdbJCZ1Rq03rqBdKT5Ky17Sz8zRLUksqIe9DW0pKtg/Z35/ztbLQ6qpOCN6rOC11A==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.14.0", + "@typescript-eslint/types": "6.16.0", "eslint-visitor-keys": "^3.4.1" }, "engines": { diff --git a/playground/package.json b/playground/package.json index fd74466391..80b924c925 100644 --- a/playground/package.json +++ b/playground/package.json 
@@ -21,7 +21,7 @@ "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/parser": "^6.15.0", - "@typescript-eslint/eslint-plugin": "^6.15.0", + "@typescript-eslint/eslint-plugin": "^6.16.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", From 41581a4debf3f9d97d5c8e3296adb4ef25d3537f Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Wed, 27 Dec 2023 14:40:09 -0500 Subject: [PATCH 32/60] fix(i): Add dep kubo v0.25.0 for ipfs v0.16.0 (#2158) ## Relevant issue(s) Resolves #2157 ## Description - Fix broken mod tidy, by adding the Kubo(v0.25.0) dependency for IPFS(v0.16.0), because it was removed after v0.15.0. More detail: https://github.com/ipfs/boxo/issues/522 --- go.mod | 22 ++++++++++++---------- go.sum | 50 ++++++++++++++++++++++++++++++-------------------- 2 files changed, 42 insertions(+), 30 deletions(-) diff --git a/go.mod b/go.mod index 74a2397bcc..f68cbe96ab 100644 --- a/go.mod +++ b/go.mod @@ -22,7 +22,7 @@ require ( github.com/ipfs/go-log/v2 v2.5.1 github.com/jbenet/goprocess v0.1.4 github.com/lens-vm/lens/host-go v0.0.0-20231127204031-8d858ed2926c - github.com/libp2p/go-libp2p v0.32.1 + github.com/libp2p/go-libp2p v0.32.2 github.com/libp2p/go-libp2p-gostream v0.6.0 github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/libp2p/go-libp2p-pubsub v0.10.0 @@ -47,10 +47,10 @@ require ( go.opentelemetry.io/otel/sdk/metric v1.21.0 go.uber.org/zap v1.26.0 golang.org/x/crypto v0.17.0 - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa + golang.org/x/exp v0.0.0-20231127185646-65229373498e golang.org/x/net v0.19.0 - google.golang.org/protobuf v1.32.0 google.golang.org/grpc v1.60.1 + google.golang.org/protobuf v1.32.0 ) require ( @@ -90,7 +90,7 @@ require ( github.com/gorilla/websocket v1.5.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/hashicorp/golang-lru/arc/v2 v2.0.5 // indirect github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect github.com/hashicorp/hcl v1.0.0 // indirect @@ -104,6 +104,7 @@ require ( github.com/ipfs/go-ipld-legacy v0.2.1 // indirect github.com/ipfs/go-metrics-interface v0.0.1 // indirect github.com/ipfs/go-peertaskqueue v0.8.1 // indirect + github.com/ipfs/kubo v0.25.0 // indirect github.com/ipld/go-codec-dagpb v1.6.0 // indirect github.com/ipld/go-ipld-prime v0.21.0 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect @@ -127,8 +128,8 @@ require ( github.com/mailru/easyjson v0.7.7 // indirect github.com/marten-seemann/tcp v0.0.0-20210406111302-dfbc87cc63fd // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect - github.com/miekg/dns v1.1.56 // indirect + github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect + github.com/miekg/dns v1.1.57 // indirect github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b // indirect github.com/mikioh/tcpopt v0.0.0-20190314235656-172688c1accc // indirect github.com/minio/sha256-simd v1.0.1 // indirect @@ -153,11 +154,11 @@ require ( github.com/polydawn/refmt v0.89.0 // indirect github.com/prometheus/client_golang v1.17.0 // indirect github.com/prometheus/client_model v0.5.0 // indirect - github.com/prometheus/common v0.44.0 // indirect + github.com/prometheus/common v0.45.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect 
github.com/quic-go/qpack v0.4.0 // indirect github.com/quic-go/qtls-go1-20 v0.3.4 // indirect - github.com/quic-go/quic-go v0.39.3 // indirect + github.com/quic-go/quic-go v0.39.4 // indirect github.com/quic-go/webtransport-go v0.6.0 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect @@ -169,6 +170,7 @@ require ( github.com/spf13/cast v1.6.0 // indirect github.com/stretchr/objx v0.5.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect + github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect github.com/textileio/go-log/v2 v2.1.3-gke-2 // indirect github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 // indirect github.com/x448/float16 v0.8.4 // indirect @@ -184,8 +186,8 @@ require ( golang.org/x/sync v0.5.0 // indirect golang.org/x/sys v0.15.0 // indirect golang.org/x/text v0.14.0 // indirect - golang.org/x/tools v0.15.0 // indirect - gonum.org/v1/gonum v0.13.0 // indirect + golang.org/x/tools v0.16.0 // indirect + gonum.org/v1/gonum v0.14.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 2186809d03..0c28b4180c 100644 --- a/go.sum +++ b/go.sum @@ -154,6 +154,7 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= +github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/flatbuffers v2.0.6+incompatible h1:XHFReMv7nFFusa+CEokzWbzaYocKXI6C7hdU5Kgh9Lw= github.com/google/flatbuffers v2.0.6+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= @@ -191,8 +192,8 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d h1:dg1dEPuWpEqDnvIw251EVy4zlP8gWbsGj4BsUKCRpYs= -github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= github.com/hashicorp/golang-lru/arc/v2 v2.0.5 h1:l2zaLDubNhW4XO3LnliVj0GXO3+/CGNJAg1dcN2Fpfw= github.com/hashicorp/golang-lru/arc/v2 v2.0.5/go.mod h1:ny6zBSQZi2JxIeYcv7kt2sH2PXJtirBN7RDhRpxPkxU= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= @@ -245,7 +246,8 @@ github.com/ipfs/go-metrics-interface v0.0.1 h1:j+cpbjYvu4R8zbleSs36gvB7jR+wsL2fG github.com/ipfs/go-metrics-interface v0.0.1/go.mod h1:6s6euYU4zowdslK0GKHmqaIZ3j/b/tL7HTWtJ4VPgWY= github.com/ipfs/go-peertaskqueue v0.8.1 h1:YhxAs1+wxb5jk7RvS0LHdyiILpNmRIRnZVztekOF0pg= github.com/ipfs/go-peertaskqueue v0.8.1/go.mod 
h1:Oxxd3eaK279FxeydSPPVGHzbwVeHjatZ2GA8XD+KbPU= -github.com/ipfs/kubo v0.24.0 h1:2BAnH9j6RojzmXwQNnI2Mhj6qzs5c5jzlAzv7N9sph4= +github.com/ipfs/kubo v0.25.0 h1:VKy9oOBW34xTqwi70FPRsoA3vJJBSdaYAbgIm5NmIbY= +github.com/ipfs/kubo v0.25.0/go.mod h1:ZWSvdTvD7VLqYdquESyGTAkbiqODbLwyNCuqeOtPKsQ= github.com/ipld/go-codec-dagpb v1.6.0 h1:9nYazfyu9B1p3NAgfVdpRco3Fs2nFC72DqVsMj6rOcc= github.com/ipld/go-codec-dagpb v1.6.0/go.mod h1:ANzFhfP2uMJxRBr8CE+WQWs5UsNa0pYtmKZ+agnUw9s= github.com/ipld/go-ipld-prime v0.21.0 h1:n4JmcpOlPDIxBcY037SVfpd1G+Sj1nKZah0m6QH9C2E= @@ -289,8 +291,8 @@ github.com/libp2p/go-cidranger v1.1.0 h1:ewPN8EZ0dd1LSnrtuwd4709PXVcITVeuwbag38y github.com/libp2p/go-cidranger v1.1.0/go.mod h1:KWZTfSr+r9qEo9OkI9/SIEeAtw+NNoU0dXIXt15Okic= github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM= github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro= -github.com/libp2p/go-libp2p v0.32.1 h1:wy1J4kZIZxOaej6NveTWCZmHiJ/kY7GoAqXgqNCnPps= -github.com/libp2p/go-libp2p v0.32.1/go.mod h1:hXXC3kXPlBZ1eu8Q2hptGrMB4mZ3048JUoS4EKaHW5c= +github.com/libp2p/go-libp2p v0.32.2 h1:s8GYN4YJzgUoyeYNPdW7JZeZ5Ee31iNaIBfGYMAY4FQ= +github.com/libp2p/go-libp2p v0.32.2/go.mod h1:E0LKe+diV/ZVJVnOJby8VC5xzHF0660osg71skcxJvk= github.com/libp2p/go-libp2p-asn-util v0.3.0 h1:gMDcMyYiZKkocGXDQ5nsUQyquC9+H+iLEQHwOCZ7s8s= github.com/libp2p/go-libp2p-asn-util v0.3.0/go.mod h1:B1mcOrKUE35Xq/ASTmQ4tN3LNzVVaMNmq2NACuqyB9w= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= @@ -329,12 +331,12 @@ github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27k github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= -github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg= +github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k= github.com/microcosm-cc/bluemonday v1.0.1/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/dns v1.1.56 h1:5imZaSeoRNvpM9SzWNhEcP9QliKiz20/dA2QabIGVnE= -github.com/miekg/dns v1.1.56/go.mod h1:cRm6Oo2C8TY9ZS/TqsSrseAcncm74lfK5G+ikN2SWWY= +github.com/miekg/dns v1.1.57 h1:Jzi7ApEIzwEPLHWRcafCN9LZSBbqQpxjt/wpgvg7wcM= +github.com/miekg/dns v1.1.57/go.mod h1:uqRjCRUuEAA6qsOiJvDd+CFo/vW+y5WR6SNmHE55hZk= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c h1:bzE/A84HN25pxAuk9Eej1Kz9OUelF97nAc82bDquQI8= github.com/mikioh/tcp v0.0.0-20190314235350-803a9b46060c/go.mod h1:0SQS9kMwD2VsyFEB++InYyBJroV/FRmBgcydeSUcJms= github.com/mikioh/tcpinfo v0.0.0-20190314235526-30a79bb1804b h1:z78hV3sbSMAUoyUMM0I83AUIT6Hu17AWfgjzIbtrYFc= @@ -387,6 +389,7 @@ github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod 
h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4= @@ -423,8 +426,8 @@ github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1: github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI= github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= -github.com/prometheus/common v0.44.0 h1:+5BrQJwiBB9xsMygAB3TNvpQKOwlkc25LbISbrdOOfY= -github.com/prometheus/common v0.44.0/go.mod h1:ofAIvZbQ1e/nugmZGz4/qCb9Ap1VoSTIO7x0VV9VvuY= +github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM= +github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY= github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= @@ -432,8 +435,8 @@ github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo= github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A= github.com/quic-go/qtls-go1-20 v0.3.4 h1:MfFAPULvst4yoMgY9QmtpYmfij/em7O8UUi+bNVm7Cg= github.com/quic-go/qtls-go1-20 v0.3.4/go.mod h1:X9Nh97ZL80Z+bX/gUXMbipO6OxdiDi58b/fMC9mAL+k= -github.com/quic-go/quic-go v0.39.3 h1:o3YB6t2SR+HU/pgwF29kJ6g4jJIJEwEZ8CKia1h1TKg= -github.com/quic-go/quic-go v0.39.3/go.mod h1:T09QsDQWjLiQ74ZmacDfqZmhY/NLnw5BC40MANNNZ1Q= +github.com/quic-go/quic-go v0.39.4 h1:PelfiuG7wXEffUT2yceiqz5V6Pc0TA5ruOd1LcmFc1s= +github.com/quic-go/quic-go v0.39.4/go.mod h1:T09QsDQWjLiQ74ZmacDfqZmhY/NLnw5BC40MANNNZ1Q= github.com/quic-go/webtransport-go v0.6.0 h1:CvNsKqc4W2HljHJnoT+rMmbRJybShZ0YPFDD3NxaZLY= github.com/quic-go/webtransport-go v0.6.0/go.mod h1:9KjU4AEBqEQidGHNDkZrb8CAa1abRaosM2yGOyiikEc= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= @@ -448,6 +451,7 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= +github.com/samber/lo v1.39.0 h1:4gTz1wUhNYLhFSKl6O+8peW0v2F4BCY034GRpU9WnuA= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/shurcooL/component v0.0.0-20170202220835-f88ec8f54cc4/go.mod h1:XhFIlyj5a1fBNx5aJTbKoIq0mNaPvOagO+HjB3EtxrY= github.com/shurcooL/events v0.0.0-20181021180414-410e4ca65f48/go.mod h1:5u70Mqkb5O5cxEA8nxTsgrgLehJeAw6Oc4Ab1c/P1HM= @@ -518,7 +522,8 @@ github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcU github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod 
h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= -github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= +github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= +github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= github.com/textileio/go-datastore-extensions v1.0.1 h1:qIJGqJaigQ1wD4TdwS/hf73u0HChhXvvUSJuxBEKS+c= github.com/textileio/go-ds-badger3 v0.1.0 h1:q0kBuBmAcRUR3ClMSYlyw0224XeuzjjGinU53Qz1uXI= @@ -539,6 +544,7 @@ github.com/vito/go-sse v1.0.0/go.mod h1:2wkcaQ+jtlZ94Uve8gYZjFpL68luAjssTINA2hpg github.com/warpfork/go-testmark v0.12.1 h1:rMgCpJfwy1sJ50x0M0NgyphxYYPMOODIJHhsXyEHU0s= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0 h1:GDDkbFiaK8jsSDJfjId/PEGEShv6ugrt4kYsC5UIDaQ= github.com/warpfork/go-wish v0.0.0-20220906213052-39a1cc7a02d0/go.mod h1:x6AKhvSSexNrVSrViXSHUEbICjmGXhtgABaHIySUSGw= +github.com/whyrusleeping/base32 v0.0.0-20170828182744-c30ac30633cc h1:BCPnHtcboadS0DvysUuJXZ4lWVv5Bh5i7+tbIyi+ck4= github.com/whyrusleeping/chunker v0.0.0-20181014151217-fe64bd25879f h1:jQa4QT2UP9WYv2nzyawpKMOCl+Z/jW7djv2/J50lj9E= github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1 h1:EKhdznlJHPMoKr0XTrX+IlJs1LH3lyx2nfr1dOlZ79k= github.com/whyrusleeping/go-keyspace v0.0.0-20160322163242-5b898ac5add1/go.mod h1:8UvriyWtv5Q5EOgjHaSseUEdkQfvwFv1I/In/O2M9gc= @@ -594,8 +600,8 @@ golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= -golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= +golang.org/x/exp v0.0.0-20231127185646-65229373498e h1:Gvh4YaCaXNs6dKTlfgismwWZKyjVZXwOPfIyUaqU3No= +golang.org/x/exp v0.0.0-20231127185646-65229373498e/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -622,6 +628,7 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= @@ -657,7 +664,9 @@ 
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200124204421-9fbb57f87de9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -673,6 +682,7 @@ golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= @@ -696,14 +706,14 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.15.0 h1:zdAyfUGbYmuVokhzVmghFl2ZJh5QhcfebBgmVPFYA+8= -golang.org/x/tools v0.15.0/go.mod h1:hpksKq4dtpQWS1uQ61JkdqWM3LscIS6Slf+VVkm+wQk= +golang.org/x/tools v0.16.0 h1:GO788SKMRunPIBCXiQyo2AaexLstOrVhuAL5YwsckQM= +golang.org/x/tools v0.16.0/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -gonum.org/v1/gonum v0.13.0 h1:a0T3bh+7fhRyqeNbiC3qVHYmkiQgit3wnNan/2c0HMM= -gonum.org/v1/gonum v0.13.0/go.mod h1:/WPYRckkfWrhWefxyYTfrTtQR0KH4iyHNuzxqXAKyAU= +gonum.org/v1/gonum v0.14.0 h1:2NiG67LD1tEH0D7kM+ps2V+fXmsAnpUeec7n8tcr4S0= +gonum.org/v1/gonum v0.14.0/go.mod h1:AoWeoz0becf9QMWtE8iWXNXc27fK4fNeHNf/oMejGfU= google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.0.0-20181030000543-1d582fd0359e/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0= google.golang.org/api v0.1.0/go.mod h1:UGEZY7KEX120AnNLIHFMKIo4obdJhkp2tPbaPlQx13Y= From 
4931a78accc667c9d023c67dd314190c3a1d07a6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Dec 2023 20:47:54 -0500 Subject: [PATCH 33/60] bot: Bump @typescript-eslint/parser from 6.15.0 to 6.16.0 in /playground (#2156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/parser](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/parser) from 6.15.0 to 6.16.0.
Release notes

Sourced from @typescript-eslint/parser's releases.

v6.16.0

6.16.0 (2023-12-25)

Bug Fixes

  • eslint-plugin: [unbound-method] exempt all non-Promise built-in statics (#8096) (3182959)

Features

  • eslint-plugin: deprecate formatting rules (#8073) (04dea84)
  • typescript-estree: add allowDefaultProjectForFiles project service allowlist option (#7752) (7ddadda)

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/parser's changelog.

6.16.0 (2023-12-25)

Note: Version bump only for package @typescript-eslint/parser

You can read about our versioning strategy and releases on our website.

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/parser&package-manager=npm_and_yarn&previous-version=6.15.0&new-version=6.16.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 90 ++++-------------------------------- playground/package.json | 2 +- 2 files changed, 9 insertions(+), 83 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 1aa94f2685..62abce1552 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -19,7 +19,7 @@ "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.16.0", - "@typescript-eslint/parser": "^6.15.0", + "@typescript-eslint/parser": "^6.16.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -2329,15 +2329,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.15.0.tgz", - "integrity": "sha512-MkgKNnsjC6QwcMdlNAel24jjkEO/0hQaMDLqP4S9zq5HBAUJNQB6y+3DwLjX7b3l2b37eNAxMPLwb3/kh8VKdA==", + "version": "6.16.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.16.0.tgz", + "integrity": "sha512-H2GM3eUo12HpKZU9njig3DF5zJ58ja6ahj1GoHEHOgQvYxzoFJJEvC1MQ7T2l9Ha+69ZSOn7RTxOdpC/y3ikMw==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.15.0", - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/typescript-estree": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0", + "@typescript-eslint/scope-manager": "6.16.0", + "@typescript-eslint/types": "6.16.0", + "@typescript-eslint/typescript-estree": "6.16.0", + "@typescript-eslint/visitor-keys": "6.16.0", "debug": "^4.3.4" }, "engines": { @@ -2356,80 +2356,6 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.15.0.tgz", - "integrity": "sha512-+BdvxYBltqrmgCNu4Li+fGDIkW9n//NrruzG9X1vBzaNK+ExVXPoGB71kneaVw/Jp+4rH/vaMAGC6JfMbHstVg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.15.0.tgz", - "integrity": "sha512-yXjbt//E4T/ee8Ia1b5mGlbNj9fB9lJP4jqLbZualwpP2BCQ5is6BcWwxpIsY4XKAhmdv3hrW92GdtJbatC6dQ==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.15.0.tgz", - "integrity": "sha512-7mVZJN7Hd15OmGuWrp2T9UvqR2Ecg+1j/Bp1jXUEY2GZKV6FXlOIoqVDmLpBiEiq3katvj/2n2mR0SDwtloCew==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "@typescript-eslint/visitor-keys": "6.15.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - 
"type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.15.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.15.0.tgz", - "integrity": "sha512-1zvtdC1a9h5Tb5jU9x3ADNXO9yjP8rXlaoChu0DQX40vf5ACVpYIVIZhIMZ6d5sDXH7vq4dsZBT1fEGj8D2n2w==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.15.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/scope-manager": { "version": "6.16.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.16.0.tgz", diff --git a/playground/package.json b/playground/package.json index 80b924c925..17182d0ac1 100644 --- a/playground/package.json +++ b/playground/package.json @@ -20,7 +20,7 @@ "@types/react": "^18.2.45", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/parser": "^6.15.0", + "@typescript-eslint/parser": "^6.16.0", "@typescript-eslint/eslint-plugin": "^6.16.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", From 710b84c9fb8dedcc99d8686d185cea6a16cfd542 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 08:40:45 -0800 Subject: [PATCH 34/60] bot: Bump @types/react from 18.2.45 to 18.2.46 in /playground (#2159) Bumps [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) from 18.2.45 to 18.2.46.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@types/react&package-manager=npm_and_yarn&previous-version=18.2.45&new-version=18.2.46)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 62abce1552..9a05301ece 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,7 +15,7 @@ "swagger-ui-react": "^5.10.5" }, "devDependencies": { - "@types/react": "^18.2.45", + "@types/react": "^18.2.46", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.16.0", @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.45", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.45.tgz", - "integrity": "sha512-TtAxCNrlrBp8GoeEp1npd5g+d/OejJHFxS3OWmrPBMFaVQMSN0OFySozJio5BHxTuTeug00AVXVAjfDSfk+lUg==", + "version": "18.2.46", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.46.tgz", + "integrity": "sha512-nNCvVBcZlvX4NU1nRRNV/mFl1nNRuTuslAJglQsq+8ldXe5Xv0Wd2f7WTE3jOxhLH2BFfiZGC6GCp+kHQbgG+w==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", diff --git a/playground/package.json b/playground/package.json index 17182d0ac1..85cf771f54 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,7 +17,7 @@ "swagger-ui-react": "^5.10.5" }, "devDependencies": { - "@types/react": "^18.2.45", + "@types/react": "^18.2.46", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/parser": "^6.16.0", From b774ff1f9092135c931233089f5e6c5cf911b2ba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 12:31:06 -0800 Subject: [PATCH 35/60] bot: Bump github.com/bits-and-blooms/bitset from 1.12.0 to 1.13.0 (#2160) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/bits-and-blooms/bitset](https://github.com/bits-and-blooms/bitset) from 1.12.0 to 1.13.0.
Release notes

Sourced from github.com/bits-and-blooms/bitset's releases.

Version 1.13.0

What's Changed

Full Changelog: https://github.com/bits-and-blooms/bitset/compare/v1.12.0...v.1.13.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/bits-and-blooms/bitset&package-manager=go_modules&previous-version=1.12.0&new-version=1.13.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index f68cbe96ab..11420aaf2b 100644 --- a/go.mod +++ b/go.mod @@ -3,7 +3,7 @@ module github.com/sourcenetwork/defradb go 1.20 require ( - github.com/bits-and-blooms/bitset v1.12.0 + github.com/bits-and-blooms/bitset v1.13.0 github.com/bxcodec/faker v2.0.1+incompatible github.com/evanphx/json-patch/v5 v5.7.0 github.com/fxamacker/cbor/v2 v2.5.0 diff --git a/go.sum b/go.sum index 0c28b4180c..52bbcea39e 100644 --- a/go.sum +++ b/go.sum @@ -21,8 +21,8 @@ github.com/benbjohnson/clock v1.3.5/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZx github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bits-and-blooms/bitset v1.12.0 h1:U/q1fAF7xXRhFCrhROzIfffYnu+dlS38vCZtmFVPHmA= -github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= +github.com/bits-and-blooms/bitset v1.13.0 h1:bAQ9OPNFYbGHV6Nez0tmNI0RiEu7/hxlYJRUA0wFAVE= +github.com/bits-and-blooms/bitset v1.13.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= github.com/bradfitz/go-smtpd v0.0.0-20170404230938-deb6d6237625/go.mod h1:HYsPBTaaSFSlLx/70C2HPIMNZpVV8+vt/A+FMnYP11g= github.com/buger/jsonparser v0.0.0-20181115193947-bf1c66bbce23/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bxcodec/faker v2.0.1+incompatible h1:P0KUpUw5w6WJXwrPfv35oc91i4d8nf40Nwln+M/+faA= From 9da4dab33681781c444db5153f3d8d4799773c04 Mon Sep 17 00:00:00 2001 From: Shahzad Lone Date: Thu, 4 Jan 2024 18:03:00 -0500 Subject: [PATCH 36/60] refactor: Rename key,id,dockey to docID terminology (#1749) ## Relevant issue(s) - Subtask of #1750 [EPIC] - [x] Resolves #1752 - [x] Resolves #1272 ## BREAKING CHANGE: - Use of `_key` to access a document's unique id is now deprecated, instead use `_docID`. - Use of `dockey`/`id` is now deprecated, instead use `docID`. - Use of `dockeys`/`ids` is now deprecated, instead use `docIDs`. ## Description - [x] Rename `_key` to `_docID` everywhere. - [x] Rename `_keys` to `docIDs` in explain the response. - [x] Rename `_newKey` to `_docIDNew` in backup/recover functionality. - [x] Fix `_docID` tests. - [x] Fix explain and backup/recover functionality tests. - [x] Fix the collectionID order for a P2P test (leaving a note as order was reverted). - [x] Update all cids. - [x] Rename all files with `key(s)|dockey(s)` in the name to `doc_id(s)`. - [x] Document breaking change to pass change detector. ## For Reviewers: - Main commits to review are the `PR(MAIN)` commits. - If you have more time `PR(MINOR) and PR(*TEST)` commits are good to go over ## Disclaimer / Discussion: I do not like these non-underscored `docID/docIDs`, would be in favor of underscoring these : ``` query { User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { _docID } } ``` ``` query { Users { Name _docID _version { docID } } } ``` ``` Request: ` { commits(groupBy: [docID], order: {docID: DESC}) { docID } }`, Results: []map[string]any{ { "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { "docID": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", }, }, ``` EDIT: Above was resolved with #2162, to do out of this PR. 
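For anyone migrating existing requests, a minimal before/after sketch of the rename (the `User` collection and the document ID are illustrative; the old `dockeys`/`_key` spellings follow the breaking-change list above):

```
# Before (deprecated):
query {
  User(dockeys: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) {
    _key
  }
}

# After:
query {
  User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) {
    _docID
  }
}
```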
## Limitations (out of scope of this PR): - #1467 - #1751 - #1550 - #2162 --- Makefile | 20 +- README.md | 14 +- cli/cli.go | 2 +- cli/collection_delete.go | 34 +- cli/collection_get.go | 6 +- ...ion_keys.go => collection_list_doc_ids.go} | 22 +- cli/collection_update.go | 42 +-- cli/errors.go | 2 +- cli/version_test.go | 18 +- client/collection.go | 99 +++--- client/descriptions.go | 8 +- client/{dockey.go => doc_id.go} | 64 ++-- client/document.go | 71 ++-- client/document_test.go | 12 +- client/errors.go | 27 +- client/mocks/collection.go | 322 +++++++++--------- client/mocks/db.go | 150 +++++--- client/request/commit.go | 2 +- client/request/consts.go | 24 +- client/request/mutation.go | 6 +- client/request/select.go | 8 +- client/request/subscription.go | 10 +- core/crdt/base_test.go | 8 +- core/crdt/composite.go | 8 +- core/crdt/lwwreg.go | 8 +- core/crdt/lwwreg_test.go | 2 +- core/doc.go | 18 +- core/key.go | 52 +-- core/key_test.go | 26 +- db/backup.go | 48 +-- db/backup_test.go | 28 +- db/base/collection_keys.go | 17 +- db/collection.go | 147 ++++---- db/collection_delete.go | 86 ++--- db/collection_get.go | 14 +- db/collection_index.go | 4 +- db/collection_update.go | 76 ++--- db/errors.go | 115 ++----- db/fetcher/encoded_doc.go | 26 +- db/fetcher/fetcher.go | 22 +- db/fetcher/indexer.go | 6 +- db/fetcher/mocks/encoded_document.go | 20 +- db/fetcher/versioned.go | 28 +- db/index.go | 6 +- db/indexed_docs_test.go | 22 +- db/subscriptions.go | 2 +- docs/cli/defradb_client.md | 1 + docs/cli/defradb_client_collection.md | 6 +- docs/cli/defradb_client_collection_delete.md | 12 +- ...md => defradb_client_collection_docIDs.md} | 12 +- docs/cli/defradb_client_collection_get.md | 2 +- docs/cli/defradb_client_collection_update.md | 14 +- docs/cli/defradb_client_document.md | 38 --- docs/cli/defradb_client_document_create.md | 44 --- docs/cli/defradb_client_document_delete.md | 46 --- docs/cli/defradb_client_document_save.md | 42 --- docs/cli/defradb_client_document_update.md | 52 --- docs/cli/defradb_client_index_create.md | 4 +- ...document_get.md => defradb_client_view.md} | 21 +- ...ent_keys.md => defradb_client_view_add.md} | 22 +- .../i1749-rename-key-to-doc-id-terminology.md | 7 + events/db_update.go | 2 +- examples/request/user_creation.graphql | 4 +- examples/request/user_query.graphql | 4 +- http/client_collection.go | 92 ++--- http/errors.go | 10 - http/handler_collection.go | 98 +++--- lens/fetcher.go | 10 +- merkle/clock/clock_test.go | 8 +- merkle/clock/heads_test.go | 2 +- net/client.go | 8 +- net/client_test.go | 6 +- net/dag.go | 10 +- net/dag_test.go | 8 +- net/doc.go | 2 +- net/errors.go | 22 +- net/pb/net.pb.go | 120 +++---- net/pb/net.proto | 6 +- net/pb/net_grpc.pb.go | 2 +- net/pb/net_vtproto.pb.go | 36 +- net/peer.go | 50 +-- net/peer_collection.go | 12 +- net/peer_replicator.go | 4 +- net/peer_test.go | 46 +-- net/process.go | 8 +- net/server.go | 60 ++-- net/server_test.go | 26 +- planner/commit.go | 28 +- planner/create.go | 6 +- planner/delete.go | 10 +- planner/errors.go | 7 +- planner/explain.go | 4 +- planner/group.go | 6 +- planner/mapper/commitSelect.go | 4 +- planner/mapper/mapper.go | 10 +- planner/mapper/targetable.go | 6 +- planner/multi.go | 18 +- planner/scan.go | 2 +- planner/select.go | 44 +-- planner/type_join.go | 16 +- planner/type_join.md | 18 +- planner/update.go | 11 +- request/graphql/parser/commit.go | 4 +- request/graphql/parser/mutation.go | 4 +- request/graphql/parser/query.go | 18 +- request/graphql/schema/collection.go | 16 +- 
request/graphql/schema/descriptions.go | 40 +-- request/graphql/schema/descriptions_test.go | 66 ++-- request/graphql/schema/generate.go | 40 +-- request/graphql/schema/types/commits.go | 28 +- request/graphql/schema/types/descriptions.go | 10 +- tests/bench/bench_util.go | 14 +- tests/bench/collection/utils.go | 18 +- tests/bench/query/index/simple_test.go | 2 +- tests/bench/query/planner/simple_test.go | 2 +- tests/bench/query/simple/simple_test.go | 2 +- tests/bench/query/simple/utils.go | 34 +- tests/bench/query/simple/with_filter_test.go | 2 +- .../query/simple/with_limit_offset_test.go | 2 +- .../query/simple/with_multi_lookup_test.go | 6 +- tests/bench/query/simple/with_order_test.go | 2 +- .../query/simple/with_single_lookup_test.go | 6 +- tests/clients/cli/wrapper_collection.go | 94 ++--- tests/gen/gen_auto.go | 18 +- tests/gen/gen_auto_configurator.go | 34 +- tests/gen/gen_auto_test.go | 42 +-- .../backup/one_to_many/export_test.go | 6 +- .../backup/one_to_many/import_test.go | 22 +- .../backup/one_to_one/export_test.go | 8 +- .../backup/one_to_one/import_test.go | 34 +- .../backup/self_reference/export_test.go | 4 +- .../backup/self_reference/import_test.go | 16 +- .../integration/backup/simple/export_test.go | 6 +- .../integration/backup/simple/import_test.go | 8 +- .../{with_key_test.go => with_doc_id_test.go} | 22 +- ...with_keys_test.go => with_doc_ids_test.go} | 26 +- .../update/simple/with_filter_test.go | 4 +- .../events/simple/with_create_test.go | 8 +- .../events/simple/with_create_txn_test.go | 6 +- .../events/simple/with_delete_test.go | 8 +- .../events/simple/with_update_test.go | 14 +- tests/integration/events/utils.go | 4 +- .../integration/explain/debug/dagscan_test.go | 20 +- .../integration/explain/debug/delete_test.go | 22 +- .../explain/debug/delete_with_error_test.go | 2 +- ...est.go => group_with_doc_id_child_test.go} | 6 +- ...ckey_test.go => group_with_doc_id_test.go} | 12 +- ...o => type_join_with_filter_doc_id_test.go} | 12 +- .../integration/explain/debug/update_test.go | 16 +- ...key_test.go => with_filter_doc_id_test.go} | 30 +- .../explain/debug/with_sum_join_test.go | 2 +- .../integration/explain/default/basic_test.go | 2 +- .../explain/default/dagscan_test.go | 20 +- .../explain/default/delete_test.go | 34 +- .../explain/default/delete_with_error_test.go | 2 +- tests/integration/explain/default/fixture.go | 2 +- .../default/group_with_average_test.go | 8 +- ...est.go => group_with_doc_id_child_test.go} | 8 +- ...ckey_test.go => group_with_doc_id_test.go} | 12 +- .../default/group_with_filter_child_test.go | 4 +- .../default/group_with_limit_child_test.go | 6 +- .../explain/default/group_with_limit_test.go | 2 +- .../default/group_with_order_child_test.go | 6 +- .../explain/default/group_with_order_test.go | 2 +- .../explain/default/invalid_type_arg_test.go | 2 +- .../explain/default/type_join_many_test.go | 2 +- .../explain/default/type_join_one_test.go | 4 +- .../explain/default/type_join_test.go | 4 +- ...o => type_join_with_filter_doc_id_test.go} | 16 +- .../default/type_join_with_filter_test.go | 4 +- .../explain/default/update_test.go | 24 +- ...key_test.go => with_filter_doc_id_test.go} | 42 +-- .../explain/default/with_sum_join_test.go | 2 +- .../explain/execute/dagscan_test.go | 4 +- .../explain/execute/delete_test.go | 4 +- tests/integration/explain/execute/fixture.go | 12 +- .../execute/query_deleted_docs_test.go | 6 +- .../explain/execute/update_test.go | 4 +- .../integration/explain/simple/basic_test.go | 4 +- 
tests/integration/index/create_unique_test.go | 6 +- .../one_to_many/with_alias_test.go | 24 +- .../field_kinds/one_to_one/with_alias_test.go | 10 +- .../one_to_one/with_simple_test.go | 22 +- .../one_to_one_to_one/with_txn_test.go | 116 +++---- .../mutation/create/simple_test.go | 12 +- .../mutation/create/with_version_test.go | 2 +- .../one_to_many/with_show_deleted_test.go | 16 +- .../one_to_one_to_one/with_id_test.go | 18 +- .../one_to_one_to_one/with_txn_test.go | 114 +++---- .../mutation/delete/simple_test.go | 4 +- .../delete/with_deleted_field_test.go | 8 +- .../mutation/delete/with_id_alias_test.go | 6 +- .../mutation/delete/with_id_test.go | 12 +- .../mutation/delete/with_id_txn_test.go | 8 +- .../mutation/delete/with_ids_alias_test.go | 10 +- .../mutation/delete/with_ids_filter_test.go | 8 +- .../mutation/delete/with_ids_test.go | 28 +- .../mutation/delete/with_ids_txn_test.go | 12 +- .../delete/with_ids_update_alias_test.go | 10 +- .../integration/mutation/mix/with_txn_test.go | 80 ++--- .../special/invalid_operation_test.go | 2 +- .../field_kinds/one_to_many/simple_test.go | 32 +- .../one_to_many/with_alias_test.go | 48 +-- .../field_kinds/one_to_one/with_alias_test.go | 46 +-- .../one_to_one/with_simple_test.go | 58 ++-- .../update/underscored_schema_test.go | 1 + .../mutation/update/with_delete_test.go | 2 +- .../mutation/update/with_filter_test.go | 2 +- .../mutation/update/with_id_test.go | 10 +- .../mutation/update/with_ids_test.go | 2 +- tests/integration/net/order/tcp_test.go | 2 +- tests/integration/net/order/utils.go | 42 +-- .../peer/subscribe/with_add_get_test.go | 2 + .../simple/replicator/with_create_test.go | 20 +- .../integration/query/commits/simple_test.go | 32 +- .../query/commits/with_cid_test.go | 10 +- .../query/commits/with_depth_test.go | 34 +- ...ey_cid_test.go => with_doc_id_cid_test.go} | 22 +- ...ount_test.go => with_doc_id_count_test.go} | 12 +- ...ield_test.go => with_doc_id_field_test.go} | 36 +- ...est.go => with_doc_id_group_order_test.go} | 12 +- ...st.go => with_doc_id_limit_offset_test.go} | 10 +- ...imit_test.go => with_doc_id_limit_test.go} | 10 +- ...=> with_doc_id_order_limit_offset_test.go} | 10 +- ...rder_test.go => with_doc_id_order_test.go} | 96 +++--- ..._prop_test.go => with_doc_id_prop_test.go} | 12 +- ...ith_dockey_test.go => with_doc_id_test.go} | 76 ++--- ...e_test.go => with_doc_id_typename_test.go} | 12 +- .../query/commits/with_field_test.go | 14 +- .../query/commits/with_group_test.go | 28 +- .../query/latest_commits/simple_test.go | 2 +- .../with_collectionid_prop_test.go | 4 +- ...ield_test.go => with_doc_id_field_test.go} | 26 +- ..._prop_test.go => with_doc_id_prop_test.go} | 10 +- ...ith_dockey_test.go => with_doc_id_test.go} | 22 +- .../query/latest_commits/with_field_test.go | 4 +- ...dockey_test.go => with_cid_doc_id_test.go} | 53 +-- ...ith_dockey_test.go => with_doc_id_test.go} | 6 +- ...h_dockeys_test.go => with_doc_ids_test.go} | 6 +- .../with_filter_related_id_test.go | 6 +- .../with_group_related_id_alias_test.go | 28 +- .../query/one_to_many_to_many/joins_test.go | 66 ++-- .../query/one_to_many_to_one/joins_test.go | 62 ++-- tests/integration/query/simple/simple_test.go | 8 +- ...dockey_test.go => with_cid_doc_id_test.go} | 73 ++-- ...key_test.go => with_doc_id_filter_test.go} | 6 +- ...ith_dockey_test.go => with_doc_id_test.go} | 14 +- ...h_dockeys_test.go => with_doc_ids_test.go} | 24 +- ...ckey_test.go => with_group_doc_id_test.go} | 6 +- ...eys_test.go => with_group_doc_ids_test.go} | 6 +- 
.../query/simple/with_version_test.go | 30 +- .../schema/aggregates/inline_array_test.go | 2 +- tests/integration/schema/default_fields.go | 20 +- tests/integration/schema/filter_test.go | 12 +- tests/integration/schema/get_schema_test.go | 96 +++--- tests/integration/schema/group_test.go | 4 +- tests/integration/schema/input_type_test.go | 12 +- .../schema/migrations/query/simple_test.go | 48 +-- ...ith_dockey_test.go => with_doc_id_test.go} | 32 +- .../schema/migrations/query/with_p2p_test.go | 10 +- .../migrations/query/with_restart_test.go | 4 +- .../migrations/query/with_set_default_test.go | 12 +- .../schema/migrations/query/with_txn_test.go | 8 +- .../migrations/query/with_update_test.go | 8 +- .../schema/migrations/simple_test.go | 8 +- tests/integration/schema/simple_test.go | 6 +- .../schema/updates/add/field/create_test.go | 22 +- .../updates/add/field/create_update_test.go | 8 +- .../kind/{dockey_test.go => doc_id_test.go} | 12 +- .../field/kind/foreign_object_array_test.go | 4 +- .../add/field/kind/foreign_object_test.go | 4 +- .../schema/updates/add/field/simple_test.go | 16 +- .../schema/updates/move/simple_test.go | 2 +- .../schema/with_update_set_default_test.go | 4 +- .../subscription/subscription_test.go | 42 +-- tests/integration/utils2.go | 20 +- tests/predefined/gen_predefined.go | 26 +- tests/predefined/gen_predefined_test.go | 88 ++--- tests/predefined/util_test.go | 12 +- version/version.go | 20 +- version/version_test.go | 8 +- 281 files changed, 3026 insertions(+), 3212 deletions(-) rename cli/{collection_keys.go => collection_list_doc_ids.go} (65%) rename client/{dockey.go => doc_id.go} (50%) rename docs/cli/{defradb_client_collection_keys.md => defradb_client_collection_docIDs.md} (83%) delete mode 100644 docs/cli/defradb_client_document.md delete mode 100644 docs/cli/defradb_client_document_create.md delete mode 100644 docs/cli/defradb_client_document_delete.md delete mode 100644 docs/cli/defradb_client_document_save.md delete mode 100644 docs/cli/defradb_client_document_update.md rename docs/cli/{defradb_client_document_get.md => defradb_client_view.md} (63%) rename docs/cli/{defradb_client_document_keys.md => defradb_client_view_add.md} (66%) create mode 100644 docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md rename tests/integration/collection/update/simple/{with_key_test.go => with_doc_id_test.go} (81%) rename tests/integration/collection/update/simple/{with_keys_test.go => with_doc_ids_test.go} (79%) rename tests/integration/explain/debug/{group_with_dockey_child_test.go => group_with_doc_id_child_test.go} (80%) rename tests/integration/explain/debug/{group_with_dockey_test.go => group_with_doc_id_test.go} (78%) rename tests/integration/explain/debug/{type_join_with_filter_and_key_test.go => type_join_with_filter_doc_id_test.go} (90%) rename tests/integration/explain/debug/{with_filter_key_test.go => with_filter_doc_id_test.go} (72%) rename tests/integration/explain/default/{group_with_dockey_child_test.go => group_with_doc_id_child_test.go} (86%) rename tests/integration/explain/default/{group_with_dockey_test.go => group_with_doc_id_test.go} (89%) rename tests/integration/explain/default/{type_join_with_filter_and_key_test.go => type_join_with_filter_doc_id_test.go} (92%) rename tests/integration/explain/default/{with_filter_key_test.go => with_filter_doc_id_test.go} (85%) rename tests/integration/query/commits/{with_dockey_cid_test.go => with_doc_id_cid_test.go} (72%) rename tests/integration/query/commits/{with_dockey_count_test.go 
=> with_doc_id_count_test.go} (68%) rename tests/integration/query/commits/{with_dockey_field_test.go => with_doc_id_field_test.go} (66%) rename tests/integration/query/commits/{with_dockey_group_order_test.go => with_doc_id_group_order_test.go} (79%) rename tests/integration/query/commits/{with_dockey_limit_offset_test.go => with_doc_id_limit_offset_test.go} (75%) rename tests/integration/query/commits/{with_dockey_limit_test.go => with_doc_id_limit_test.go} (75%) rename tests/integration/query/commits/{with_dockey_order_limit_offset_test.go => with_doc_id_order_limit_offset_test.go} (74%) rename tests/integration/query/commits/{with_dockey_order_test.go => with_doc_id_order_test.go} (55%) rename tests/integration/query/commits/{with_dockey_prop_test.go => with_doc_id_prop_test.go} (74%) rename tests/integration/query/commits/{with_dockey_test.go => with_doc_id_test.go} (58%) rename tests/integration/query/commits/{with_dockey_typename_test.go => with_doc_id_typename_test.go} (69%) rename tests/integration/query/latest_commits/{with_dockey_field_test.go => with_doc_id_field_test.go} (65%) rename tests/integration/query/latest_commits/{with_dockey_prop_test.go => with_doc_id_prop_test.go} (75%) rename tests/integration/query/latest_commits/{with_dockey_test.go => with_doc_id_test.go} (58%) rename tests/integration/query/one_to_many/{with_cid_dockey_test.go => with_cid_doc_id_test.go} (81%) rename tests/integration/query/one_to_many/{with_dockey_test.go => with_doc_id_test.go} (91%) rename tests/integration/query/one_to_many/{with_dockeys_test.go => with_doc_ids_test.go} (90%) rename tests/integration/query/simple/{with_cid_dockey_test.go => with_cid_doc_id_test.go} (67%) rename tests/integration/query/simple/{with_key_test.go => with_doc_id_filter_test.go} (79%) rename tests/integration/query/simple/{with_dockey_test.go => with_doc_id_test.go} (73%) rename tests/integration/query/simple/{with_dockeys_test.go => with_doc_ids_test.go} (68%) rename tests/integration/query/simple/{with_group_dockey_test.go => with_group_doc_id_test.go} (85%) rename tests/integration/query/simple/{with_group_dockeys_test.go => with_group_doc_ids_test.go} (83%) rename tests/integration/schema/migrations/query/{with_dockey_test.go => with_doc_id_test.go} (81%) rename tests/integration/schema/updates/add/field/kind/{dockey_test.go => doc_id_test.go} (83%) diff --git a/Makefile b/Makefile index 5dddc2872e..0ddde9790f 100644 --- a/Makefile +++ b/Makefile @@ -217,11 +217,26 @@ test\:cli: test\:names: gotestsum --format testname -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) +.PHONY: test\:lens +test\:lens: + @$(MAKE) deps:lens + gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... $(TEST_FLAGS) + +.PHONY: test\:lens-quick +test\:lens-quick: + @$(MAKE) deps:lens + gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... + .PHONY: test\:all test\:all: @$(MAKE) test:names @$(MAKE) test:lens +.PHONY: test\:all-quick +test\:all-quick: + @$(MAKE) test:quick + @$(MAKE) test:lens-quick + .PHONY: test\:verbose test\:verbose: gotestsum --format standard-verbose -- $(DEFAULT_TEST_DIRECTORIES) $(TEST_FLAGS) @@ -246,11 +261,6 @@ test\:bench-short: test\:scripts: @$(MAKE) -C ./tools/scripts/ test -.PHONY: test\:lens -test\:lens: - @$(MAKE) deps:lens - gotestsum --format testname -- ./$(LENS_TEST_DIRECTORY)/... 
$(TEST_FLAGS) - .PHONY: test\:coverage test\:coverage: @$(MAKE) deps:lens diff --git a/README.md b/README.md index 8ae2ebfb44..acc438273b 100644 --- a/README.md +++ b/README.md @@ -101,7 +101,7 @@ Submit a `mutation` request to create a document of the `User` type: defradb client query ' mutation { create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key + _docID } } ' @@ -113,13 +113,13 @@ Expected response: { "data": [ { - "_key": "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", + "_docID": "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab", } ] } ``` -`_key` is the document's key, a unique identifier of the document, determined by its schema and initial data. +`_docID` is the document's unique identifier determined by its schema and initial data. ## Query documents @@ -129,7 +129,7 @@ Once you have populated your node with data, you can query it: defradb client query ' query { User { - _key + _docID age name points @@ -138,7 +138,7 @@ defradb client query ' ' ``` -This query obtains *all* users and returns their fields `_key, age, name, points`. GraphQL queries only return the exact fields requested. +This query obtains *all* users and returns their fields `_docID, age, name, points`. GraphQL queries only return the exact fields requested. You can further filter results with the `filter` argument. @@ -146,7 +146,7 @@ You can further filter results with the `filter` argument. defradb client query ' query { User(filter: {points: {_ge: 50}}) { - _key + _docID age name points @@ -166,7 +166,7 @@ To get the most recent commit in the MerkleDAG for the document identified as `b ```shell defradb client query ' query { - latestCommits(dockey: "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab") { + latestCommits(docID: "bae-91171025-ed21-50e3-b0dc-e31bccdfa1ab") { cid delta height diff --git a/cli/cli.go b/cli/cli.go index 8827424334..2ee882afce 100644 --- a/cli/cli.go +++ b/cli/cli.go @@ -91,7 +91,7 @@ func NewDefraCommand(cfg *config.Config) *cobra.Command { collection := MakeCollectionCommand(cfg) collection.AddCommand( MakeCollectionGetCommand(), - MakeCollectionKeysCommand(), + MakeCollectionListDocIDsCommand(), MakeCollectionDeleteCommand(), MakeCollectionUpdateCommand(), MakeCollectionCreateCommand(), diff --git a/cli/collection_delete.go b/cli/collection_delete.go index 85539d5eb3..dcd7c9d872 100644 --- a/cli/collection_delete.go +++ b/cli/collection_delete.go @@ -17,15 +17,15 @@ import ( ) func MakeCollectionDeleteCommand() *cobra.Command { - var keys []string + var argDocIDs []string var filter string var cmd = &cobra.Command{ - Use: "delete [--filter --key ]", - Short: "Delete documents by key or filter.", - Long: `Delete documents by key or filter and lists the number of documents deleted. + Use: "delete [--filter --docID ]", + Short: "Delete documents by docID or filter.", + Long: `Delete documents by docID or filter and lists the number of documents deleted. 
-Example: delete by key(s) - defradb client collection delete --name User --key bae-123,bae-456 +Example: delete by docID(s) + defradb client collection delete --name User --docID bae-123,bae-456 Example: delete by filter defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' @@ -37,26 +37,26 @@ Example: delete by filter } switch { - case len(keys) == 1: - docKey, err := client.NewDocKeyFromString(keys[0]) + case len(argDocIDs) == 1: + docID, err := client.NewDocIDFromString(argDocIDs[0]) if err != nil { return err } - res, err := col.DeleteWithKey(cmd.Context(), docKey) + res, err := col.DeleteWithDocID(cmd.Context(), docID) if err != nil { return err } return writeJSON(cmd, res) - case len(keys) > 1: - docKeys := make([]client.DocKey, len(keys)) - for i, v := range keys { - docKey, err := client.NewDocKeyFromString(v) + case len(argDocIDs) > 1: + docIDs := make([]client.DocID, len(argDocIDs)) + for i, v := range argDocIDs { + docID, err := client.NewDocIDFromString(v) if err != nil { return err } - docKeys[i] = docKey + docIDs[i] = docID } - res, err := col.DeleteWithKeys(cmd.Context(), docKeys) + res, err := col.DeleteWithDocIDs(cmd.Context(), docIDs) if err != nil { return err } @@ -68,11 +68,11 @@ Example: delete by filter } return writeJSON(cmd, res) default: - return ErrNoDocKeyOrFilter + return ErrNoDocIDOrFilter } }, } - cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") + cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") return cmd } diff --git a/cli/collection_get.go b/cli/collection_get.go index d908bbdb7a..d753e0a8db 100644 --- a/cli/collection_get.go +++ b/cli/collection_get.go @@ -19,7 +19,7 @@ import ( func MakeCollectionGetCommand() *cobra.Command { var showDeleted bool var cmd = &cobra.Command{ - Use: "get [--show-deleted]", + Use: "get [--show-deleted]", Short: "View document fields.", Long: `View document fields. @@ -33,11 +33,11 @@ Example: return cmd.Usage() } - docKey, err := client.NewDocKeyFromString(args[0]) + docID, err := client.NewDocIDFromString(args[0]) if err != nil { return err } - doc, err := col.Get(cmd.Context(), docKey, showDeleted) + doc, err := col.Get(cmd.Context(), docID, showDeleted) if err != nil { return err } diff --git a/cli/collection_keys.go b/cli/collection_list_doc_ids.go similarity index 65% rename from cli/collection_keys.go rename to cli/collection_list_doc_ids.go index a453c16a86..d7009cb300 100644 --- a/cli/collection_keys.go +++ b/cli/collection_list_doc_ids.go @@ -16,14 +16,14 @@ import ( "github.com/sourcenetwork/defradb/http" ) -func MakeCollectionKeysCommand() *cobra.Command { +func MakeCollectionListDocIDsCommand() *cobra.Command { var cmd = &cobra.Command{ - Use: "keys", - Short: "List all document keys.", - Long: `List all document keys. + Use: "docIDs", + Short: "List all document IDs (docIDs).", + Long: `List all document IDs (docIDs). 
Example: - defradb client collection keys --name User + defradb client collection docIDs --name User `, RunE: func(cmd *cobra.Command, args []string) error { col, ok := tryGetCollectionContext(cmd) @@ -31,16 +31,16 @@ Example: return cmd.Usage() } - docCh, err := col.GetAllDocKeys(cmd.Context()) + docCh, err := col.GetAllDocIDs(cmd.Context()) if err != nil { return err } - for docKey := range docCh { - results := &http.DocKeyResult{ - Key: docKey.Key.String(), + for docIDResult := range docCh { + results := &http.DocIDResult{ + DocID: docIDResult.ID.String(), } - if docKey.Err != nil { - results.Error = docKey.Err.Error() + if docIDResult.Err != nil { + results.Error = docIDResult.Err.Error() } if err := writeJSON(cmd, results); err != nil { return err diff --git a/cli/collection_update.go b/cli/collection_update.go index 317a2e8119..9fd2deed3f 100644 --- a/cli/collection_update.go +++ b/cli/collection_update.go @@ -17,24 +17,24 @@ import ( ) func MakeCollectionUpdateCommand() *cobra.Command { - var keys []string + var argDocIDs []string var filter string var updater string var cmd = &cobra.Command{ - Use: "update [--filter --key --updater ] ", - Short: "Update documents by key or filter.", - Long: `Update documents by key or filter. + Use: "update [--filter --docID --updater ] ", + Short: "Update documents by docID or filter.", + Long: `Update documents by docID or filter. Example: update from string - defradb client collection update --name User --key bae-123 '{ "name": "Bob" }' + defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }' Example: update by filter defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by keys +Example: update by docIDs defradb client collection update --name User \ - --key bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123,bae-456 --updater '{ "verified": true }' `, Args: cobra.RangeArgs(0, 1), RunE: func(cmd *cobra.Command, args []string) error { @@ -44,26 +44,26 @@ Example: update by keys } switch { - case len(keys) == 1 && updater != "": - docKey, err := client.NewDocKeyFromString(keys[0]) + case len(argDocIDs) == 1 && updater != "": + docID, err := client.NewDocIDFromString(argDocIDs[0]) if err != nil { return err } - res, err := col.UpdateWithKey(cmd.Context(), docKey, updater) + res, err := col.UpdateWithDocID(cmd.Context(), docID, updater) if err != nil { return err } return writeJSON(cmd, res) - case len(keys) > 1 && updater != "": - docKeys := make([]client.DocKey, len(keys)) - for i, v := range keys { - docKey, err := client.NewDocKeyFromString(v) + case len(argDocIDs) > 1 && updater != "": + docIDs := make([]client.DocID, len(argDocIDs)) + for i, v := range argDocIDs { + docID, err := client.NewDocIDFromString(v) if err != nil { return err } - docKeys[i] = docKey + docIDs[i] = docID } - res, err := col.UpdateWithKeys(cmd.Context(), docKeys, updater) + res, err := col.UpdateWithDocIDs(cmd.Context(), docIDs, updater) if err != nil { return err } @@ -74,12 +74,12 @@ Example: update by keys return err } return writeJSON(cmd, res) - case len(keys) == 1 && len(args) == 1: - docKey, err := client.NewDocKeyFromString(keys[0]) + case len(argDocIDs) == 1 && len(args) == 1: + docID, err := client.NewDocIDFromString(argDocIDs[0]) if err != nil { return err } - doc, err := col.Get(cmd.Context(), docKey, true) + doc, err := col.Get(cmd.Context(), docID, true) if err != nil { return err } @@ -88,11 +88,11 @@ Example: update by keys } 
return col.Update(cmd.Context(), doc) default: - return ErrNoDocKeyOrFilter + return ErrNoDocIDOrFilter } }, } - cmd.Flags().StringSliceVar(&keys, "key", nil, "Document key") + cmd.Flags().StringSliceVar(&argDocIDs, "docID", nil, "Document ID") cmd.Flags().StringVar(&filter, "filter", "", "Document filter") cmd.Flags().StringVar(&updater, "updater", "", "Document updater") return cmd diff --git a/cli/errors.go b/cli/errors.go index ee89a63249..bb124bc7f9 100644 --- a/cli/errors.go +++ b/cli/errors.go @@ -22,7 +22,7 @@ const ( var ( ErrNoDocOrFile = errors.New("document or file must be defined") ErrInvalidDocument = errors.New("invalid document") - ErrNoDocKeyOrFilter = errors.New("document key or filter must be defined") + ErrNoDocIDOrFilter = errors.New("docID or filter must be defined") ErrInvalidExportFormat = errors.New("invalid export format") ErrNoLensConfig = errors.New("lens config cannot be empty") ErrInvalidLensConfig = errors.New("invalid lens configuration") diff --git a/cli/version_test.go b/cli/version_test.go index 4f62f3659b..fdc6aba6e6 100644 --- a/cli/version_test.go +++ b/cli/version_test.go @@ -42,7 +42,7 @@ func TestVersionFull(t *testing.T) { assert.NoError(t, err) t.Log(buf.String()) assert.Contains(t, buf.String(), "* HTTP API") - assert.Contains(t, buf.String(), "* DocKey versions") + assert.Contains(t, buf.String(), "* DocID versions") assert.Contains(t, buf.String(), "* P2P multicodec") } @@ -59,11 +59,11 @@ func TestVersionJSON(t *testing.T) { { "release": "", "commit": "", - "commitdate": "", + "commitDate": "", "go": "", - "httpapi": "v0", - "dockeyversions": "1", - "netprotocol": "/defra/0.0.1" + "httpAPI": "v0", + "docIDVersions": "1", + "netProtocol": "/defra/0.0.1" }`) } @@ -80,10 +80,10 @@ func TestVersionJSONFull(t *testing.T) { { "release": "", "commit": "", - "commitdate": "", + "commitDate": "", "go": "", - "httpapi": "v0", - "dockeyversions": "1", - "netprotocol": "/defra/0.0.1" + "httpAPI": "v0", + "docIDVersions": "1", + "netProtocol": "/defra/0.0.1" }`) } diff --git a/client/collection.go b/client/collection.go index 3e6bb64cc4..3a42871c62 100644 --- a/client/collection.go +++ b/client/collection.go @@ -33,26 +33,32 @@ type CollectionDefinition struct { type Collection interface { // Name returns the name of this collection. Name() string + // ID returns the ID of this Collection. ID() uint32 + // SchemaRoot returns the Root of the Schema used to define this Collection. SchemaRoot() string // Definition contains the metadata defining what a Collection is. Definition() CollectionDefinition + // Schema returns the SchemaDescription used to define this Collection. Schema() SchemaDescription + // Description returns the CollectionDescription of this Collection. Description() CollectionDescription // Create a new document. // - // Will verify the DocKey/CID to ensure that the new document is correctly formatted. + // Will verify the DocID/CID to ensure that the new document is correctly formatted. Create(context.Context, *Document) error + // CreateMany new documents. // - // Will verify the DocKeys/CIDs to ensure that the new documents are correctly formatted. + // Will verify the DocIDs/CIDs to ensure that the new documents are correctly formatted. CreateMany(context.Context, []*Document) error + // Update an existing document with the new values. // // Any field that needs to be removed or cleared should call doc.Clear(field) before. 
@@ -60,93 +66,102 @@ type Collection interface { // // Will return a ErrDocumentNotFound error if the given document is not found. Update(context.Context, *Document) error + // Save the given document in the database. // - // If a document exists with the given DocKey it will update it. Otherwise a new document + // If a document exists with the given DocID it will update it. Otherwise a new document // will be created. Save(context.Context, *Document) error - // Delete will attempt to delete a document by key. + + // Delete will attempt to delete a document by DocID. // // Will return true if a deletion is successful, and return false along with an error // if it cannot. If the document doesn't exist, then it will return false and a ErrDocumentNotFound error. - // This operation will hard-delete all state relating to the given DocKey. This includes data, block, and head storage. - Delete(context.Context, DocKey) (bool, error) - // Exists checks if a given document exists with supplied DocKey. + // This operation will hard-delete all state relating to the given DocID. This includes data, block, and head storage. + Delete(context.Context, DocID) (bool, error) + + // Exists checks if a given document exists with supplied DocID. // // Will return true if a matching document exists, otherwise will return false. - Exists(context.Context, DocKey) (bool, error) + Exists(context.Context, DocID) (bool, error) // UpdateWith updates a target document using the given updater type. // - // Target can be a Filter statement, a single docKey, a single document, - // an array of docKeys, or an array of documents. + // Target can be a Filter statement, a single DocID, a single document, + // an array of DocIDs, or an array of documents. // It is recommended to use the respective typed versions of Update - // (e.g. UpdateWithFilter or UpdateWithKey) over this function if you can. + // (e.g. UpdateWithFilter or UpdateWithDocID) over this function if you can. // // Returns an ErrInvalidUpdateTarget error if the target type is not supported. // Returns an ErrInvalidUpdater error if the updater type is not supported. UpdateWith(ctx context.Context, target any, updater string) (*UpdateResult, error) + // UpdateWithFilter updates using a filter to target documents for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. UpdateWithFilter(ctx context.Context, filter any, updater string) (*UpdateResult, error) - // UpdateWithKey updates using a DocKey to target a single document for update. + + // UpdateWithDocID updates using a DocID to target a single document for update. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. // - // Returns an ErrDocumentNotFound if a document matching the given DocKey is not found. - UpdateWithKey(ctx context.Context, key DocKey, updater string) (*UpdateResult, error) - // UpdateWithKeys updates documents matching the given DocKeys. + // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. + UpdateWithDocID(ctx context.Context, docID DocID, updater string) (*UpdateResult, error) + + // UpdateWithDocIDs updates documents matching the given DocIDs. // // The provided updater must be a string Patch, string Merge Patch, a parsed Patch, or parsed Merge Patch // else an ErrInvalidUpdater will be returned. 
// - // Returns an ErrDocumentNotFound if a document is not found for any given DocKey. - UpdateWithKeys(context.Context, []DocKey, string) (*UpdateResult, error) + // Returns an ErrDocumentNotFound if a document is not found for any given DocID. + UpdateWithDocIDs(context.Context, []DocID, string) (*UpdateResult, error) // DeleteWith deletes a target document. // - // Target can be a Filter statement, a single docKey, a single document, an array of docKeys, + // Target can be a Filter statement, a single DocID, a single document, an array of DocIDs, // or an array of documents. It is recommended to use the respective typed versions of Delete - // (e.g. DeleteWithFilter or DeleteWithKey) over this function if you can. - // This operation will soft-delete documents related to the given DocKey and update the composite block + // (e.g. DeleteWithFilter or DeleteWithDocID) over this function if you can. + // This operation will soft-delete documents related to the given DocID and update the composite block // with a status of `Deleted`. // // Returns an ErrInvalidDeleteTarget if the target type is not supported. DeleteWith(ctx context.Context, target any) (*DeleteResult, error) + // DeleteWithFilter deletes documents matching the given filter. // // This operation will soft-delete documents related to the given filter and update the composite block // with a status of `Deleted`. DeleteWithFilter(ctx context.Context, filter any) (*DeleteResult, error) - // DeleteWithKey deletes using a DocKey to target a single document for delete. + + // DeleteWithDocID deletes using a DocID to target a single document for delete. // - // This operation will soft-delete documents related to the given DocKey and update the composite block + // This operation will soft-delete documents related to the given DocID and update the composite block // with a status of `Deleted`. // - // Returns an ErrDocumentNotFound if a document matching the given DocKey is not found. - DeleteWithKey(context.Context, DocKey) (*DeleteResult, error) - // DeleteWithKeys deletes documents matching the given DocKeys. + // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. + DeleteWithDocID(context.Context, DocID) (*DeleteResult, error) + + // DeleteWithDocIDs deletes documents matching the given DocIDs. // - // This operation will soft-delete documents related to the given DocKeys and update the composite block + // This operation will soft-delete documents related to the given DocIDs and update the composite block // with a status of `Deleted`. // - // Returns an ErrDocumentNotFound if a document is not found for any given DocKey. - DeleteWithKeys(context.Context, []DocKey) (*DeleteResult, error) + // Returns an ErrDocumentNotFound if a document is not found for any given DocID. + DeleteWithDocIDs(context.Context, []DocID) (*DeleteResult, error) - // Get returns the document with the given DocKey. + // Get returns the document with the given DocID. // - // Returns an ErrDocumentNotFound if a document matching the given DocKey is not found. - Get(ctx context.Context, key DocKey, showDeleted bool) (*Document, error) + // Returns an ErrDocumentNotFound if a document matching the given DocID is not found. + Get(ctx context.Context, docID DocID, showDeleted bool) (*Document, error) // WithTxn returns a new instance of the collection, with a transaction // handle instead of a raw DB handle. WithTxn(datastore.Txn) Collection - // GetAllDocKeys returns all the document keys that exist in the collection. 
- GetAllDocKeys(ctx context.Context) (<-chan DocKeysResult, error) + // GetAllDocIDs returns all the document IDs that exist in the collection. + GetAllDocIDs(ctx context.Context) (<-chan DocIDResult, error) // CreateIndex creates a new index on the collection. // `IndexDescription` contains the description of the index to be created. @@ -162,11 +177,11 @@ type Collection interface { GetIndexes(ctx context.Context) ([]IndexDescription, error) } -// DocKeysResult wraps the result of an attempt at a DocKey retrieval operation. -type DocKeysResult struct { - // If a DocKey was successfully retrieved, this will be that key. - Key DocKey - // If an error was generated whilst attempting to retrieve the DocKey, this will be the error. +// DocIDResult wraps the result of an attempt at a DocID retrieval operation. +type DocIDResult struct { + // If a DocID was successfully retrieved, this will be that DocID. + ID DocID + // If an error was generated whilst attempting to retrieve the DocID, this will be the error. Err error } @@ -174,16 +189,16 @@ type DocKeysResult struct { type UpdateResult struct { // Count contains the number of documents updated by the update call. Count int64 - // DocKeys contains the DocKeys of all the documents updated by the update call. - DocKeys []string + // DocIDs contains the DocIDs of all the documents updated by the update call. + DocIDs []string } // DeleteResult wraps the result of an delete call. type DeleteResult struct { // Count contains the number of documents deleted by the delete call. Count int64 - // DocKeys contains the DocKeys of all the documents deleted by the delete call. - DocKeys []string + // DocIDs contains the DocIDs of all the documents deleted by the delete call. + DocIDs []string } // P2PCollection is the gRPC response representation of a P2P collection topic diff --git a/client/descriptions.go b/client/descriptions.go index 7ab7cc0982..7a4ec0ba7e 100644 --- a/client/descriptions.go +++ b/client/descriptions.go @@ -127,7 +127,7 @@ type FieldKind uint8 func (f FieldKind) String() string { switch f { - case FieldKind_DocKey: + case FieldKind_DocID: return "ID" case FieldKind_BOOL: return "Boolean" @@ -165,7 +165,7 @@ func (f FieldKind) String() string { // Note: These values are serialized and persisted in the database, avoid modifying existing values. const ( FieldKind_None FieldKind = 0 - FieldKind_DocKey FieldKind = 1 + FieldKind_DocID FieldKind = 1 FieldKind_BOOL FieldKind = 2 FieldKind_BOOL_ARRAY FieldKind = 3 FieldKind_INT FieldKind = 4 @@ -201,7 +201,7 @@ const ( // in the future. They currently roughly correspond to the GQL field types, but this // equality is not guaranteed. var FieldKindStringToEnumMapping = map[string]FieldKind{ - "ID": FieldKind_DocKey, + "ID": FieldKind_DocID, "Boolean": FieldKind_BOOL, "[Boolean]": FieldKind_NILLABLE_BOOL_ARRAY, "[Boolean!]": FieldKind_BOOL_ARRAY, @@ -280,7 +280,7 @@ type FieldDescription struct { // IsInternal returns true if this field is internally generated. func (f FieldDescription) IsInternal() bool { - return (f.Name == "_key") || f.RelationType&Relation_Type_INTERNAL_ID != 0 + return (f.Name == request.DocIDFieldName) || f.RelationType&Relation_Type_INTERNAL_ID != 0 } // IsObject returns true if this field is an object type. 
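As a usage note for the renamed `GetAllDocIDs` above, here is a minimal sketch (hypothetical helper, assuming an already-obtained `client.Collection`) of draining the `DocIDResult` channel it returns, checking each result's `Err` before using its `ID`:

```go
package example

import (
	"context"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// listDocIDs is a hypothetical helper draining the channel returned by the
// renamed GetAllDocIDs (formerly GetAllDocKeys); each DocIDResult carries
// either a retrieved ID or an Err.
func listDocIDs(ctx context.Context, col client.Collection) error {
	ch, err := col.GetAllDocIDs(ctx)
	if err != nil {
		return err
	}
	for res := range ch {
		if res.Err != nil {
			return res.Err
		}
		fmt.Println(res.ID.String())
	}
	return nil
}
```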
diff --git a/client/dockey.go b/client/doc_id.go similarity index 50% rename from client/dockey.go rename to client/doc_id.go index 421820d341..601a6ed791 100644 --- a/client/dockey.go +++ b/client/doc_id.go @@ -20,14 +20,14 @@ import ( mbase "github.com/multiformats/go-multibase" ) -// DocKey versions. +// DocID versions. const ( - DocKeyV0 = 0x01 + DocIDV0 = 0x01 ) -// ValidDocKeyVersions is a map of DocKey versions and their current validity. -var ValidDocKeyVersions = map[uint16]bool{ - DocKeyV0: true, +// ValidDocIDVersions is a map of DocID versions and their current validity. +var ValidDocIDVersions = map[uint16]bool{ + DocIDV0: true, } var ( @@ -35,69 +35,69 @@ var ( SDNNamespaceV0 = uuid.Must(uuid.FromString("c94acbfa-dd53-40d0-97f3-29ce16c333fc")) ) -// DocKey is the root key identifier for documents in DefraDB. -type DocKey struct { +// DocID is the root identifier for documents in DefraDB. +type DocID struct { version uint16 uuid uuid.UUID cid cid.Cid } -// NewDocKeyV0 creates a new dockey identified by the root data CID,peerID, and namespaced by the versionNS. -func NewDocKeyV0(dataCID cid.Cid) DocKey { - return DocKey{ - version: DocKeyV0, +// NewDocIDV0 creates a new DocID identified by the root data CID, peerID, and namespaced by the versionNS. +func NewDocIDV0(dataCID cid.Cid) DocID { + return DocID{ + version: DocIDV0, uuid: uuid.NewV5(SDNNamespaceV0, dataCID.String()), cid: dataCID, } } -// NewDocKeyFromString creates a new DocKey from a string. -func NewDocKeyFromString(key string) (DocKey, error) { - parts := strings.SplitN(key, "-", 2) +// NewDocIDFromString creates a new DocID from a string. +func NewDocIDFromString(docID string) (DocID, error) { + parts := strings.SplitN(docID, "-", 2) if len(parts) != 2 { - return DocKey{}, ErrMalformedDocKey + return DocID{}, ErrMalformedDocID } versionStr := parts[0] _, data, err := mbase.Decode(versionStr) if err != nil { - return DocKey{}, err + return DocID{}, err } buf := bytes.NewBuffer(data) version, err := binary.ReadUvarint(buf) if err != nil { - return DocKey{}, err + return DocID{}, err } - if _, ok := ValidDocKeyVersions[uint16(version)]; !ok { - return DocKey{}, ErrInvalidDocKeyVersion + if _, ok := ValidDocIDVersions[uint16(version)]; !ok { + return DocID{}, ErrInvalidDocIDVersion } uuid, err := uuid.FromString(parts[1]) if err != nil { - return DocKey{}, err + return DocID{}, err } - return DocKey{ + return DocID{ version: uint16(version), uuid: uuid, }, nil } -// UUID returns the doc key in UUID form. -func (key DocKey) UUID() uuid.UUID { - return key.uuid +// UUID returns the underlying document identifier in UUID form. +func (docID DocID) UUID() uuid.UUID { + return docID.uuid } -// String returns the doc key in string form. -func (key DocKey) String() string { +// String returns the underlying document identifier in string form. +func (docID DocID) String() string { buf := make([]byte, 1) - binary.PutUvarint(buf, uint64(key.version)) + binary.PutUvarint(buf, uint64(docID.version)) versionStr, _ := mbase.Encode(mbase.Base32, buf) - return versionStr + "-" + key.uuid.String() + return versionStr + "-" + docID.uuid.String() } -// Bytes returns the DocKey in Byte format. -func (key DocKey) Bytes() []byte { +// Bytes returns the underlying document identifier in Byte format. +func (docID DocID) Bytes() []byte { buf := make([]byte, binary.MaxVarintLen16) - binary.PutUvarint(buf, uint64(key.version)) - return append(buf, key.uuid.Bytes()...) 
+ binary.PutUvarint(buf, uint64(docID.version)) + return append(buf, docID.uuid.Bytes()...) } diff --git a/client/document.go b/client/document.go index bcb8ae6070..6713f48dd0 100644 --- a/client/document.go +++ b/client/document.go @@ -53,7 +53,7 @@ import ( // @body: A document interface can be implemented by both a TypedDocument and a // UnTypedDocument, which use a schema and schemaless approach respectively. type Document struct { - key DocKey + id DocID // SchemaVersionID holds the id of the schema version that this document is // currently at. // @@ -68,10 +68,10 @@ type Document struct { isDirty bool } -// NewDocWithKey creates a new Document with a specified key. -func NewDocWithKey(key DocKey) *Document { +// NewDocWithID creates a new Document with a specified DocID. +func NewDocWithID(docID DocID) *Document { doc := newEmptyDoc() - doc.key = key + doc.id = docID return doc } @@ -90,15 +90,15 @@ func NewDocFromMap(data map[string]any) (*Document, error) { values: make(map[Field]Value), } - // check if document contains special _key field - k, hasKey := data["_key"] - if hasKey { - delete(data, "_key") // remove the key so it isn't parsed further + // check if document contains special _docID field + k, hasDocID := data[request.DocIDFieldName] + if hasDocID { + delete(data, request.DocIDFieldName) // remove the DocID so it isn't parsed further kstr, ok := k.(string) if !ok { - return nil, NewErrUnexpectedType[string]("data[_key]", k) + return nil, NewErrUnexpectedType[string]("data["+request.DocIDFieldName+"]", k) } - if doc.key, err = NewDocKeyFromString(kstr); err != nil { + if doc.id, err = NewDocIDFromString(kstr); err != nil { return nil, err } } @@ -108,9 +108,9 @@ func NewDocFromMap(data map[string]any) (*Document, error) { return nil, err } - // if no key was specified, then we assume it doesn't exist and we generate, and set it. - if !hasKey { - err = doc.generateAndSetDocKey() + // if no DocID was specified, then we assume it doesn't exist and we generate, and set it. + if !hasDocID { + err = doc.generateAndSetDocID() if err != nil { return nil, err } @@ -144,10 +144,10 @@ func (doc *Document) SetHead(head cid.Cid) { doc.head = head } -// Key returns the generated DocKey for this document. -func (doc *Document) Key() DocKey { - // Reading without a read-lock as we assume the DocKey is immutable - return doc.key +// ID returns the generated DocID for this document. +func (doc *Document) ID() DocID { + // Reading without a read-lock as we assume the DocID is immutable + return doc.id } // Get returns the raw value for a given field. @@ -392,8 +392,7 @@ func (doc *Document) String() (string, error) { return string(j), nil } -// ToMap returns the document as a map[string]any -// object. +// ToMap returns the document as a map[string]any object. func (doc *Document) ToMap() (map[string]any, error) { return doc.toMapWithKey() } @@ -479,42 +478,42 @@ func (doc *Document) toMapWithKey() (map[string]any, error) { docMap[k] = value.Value() } - docMap["_key"] = doc.Key().String() + docMap[request.DocIDFieldName] = doc.ID().String() return docMap, nil } -// GenerateDocKey generates docKey/docID corresponding to the document. -func (doc *Document) GenerateDocKey() (DocKey, error) { +// GenerateDocID generates the DocID corresponding to the document. 
+func (doc *Document) GenerateDocID() (DocID, error) { bytes, err := doc.Bytes() if err != nil { - return DocKey{}, err + return DocID{}, err } cid, err := ccid.NewSHA256CidV1(bytes) if err != nil { - return DocKey{}, err + return DocID{}, err } - return NewDocKeyV0(cid), nil + return NewDocIDV0(cid), nil } -// setDocKey sets the `doc.key` (should NOT be public). -func (doc *Document) setDocKey(docID DocKey) { +// setDocID sets the `doc.id` (should NOT be public). +func (doc *Document) setDocID(docID DocID) { doc.mu.Lock() defer doc.mu.Unlock() - doc.key = docID + doc.id = docID } -// generateAndSetDocKey generates the docKey/docID and then (re)sets `doc.key`. -func (doc *Document) generateAndSetDocKey() error { - docKey, err := doc.GenerateDocKey() +// generateAndSetDocID generates the DocID and then (re)sets `doc.id`. +func (doc *Document) generateAndSetDocID() error { + docID, err := doc.GenerateDocID() if err != nil { return err } - doc.setDocKey(docKey) + doc.setDocID(docID) return nil } @@ -537,8 +536,8 @@ func (doc *Document) remapAliasFields(fieldDescriptions []FieldDescription) (boo return foundAlias, nil } -// RemapAliasFieldsAndDockey remaps the alias fields and fixes (overwrites) the dockey. -func (doc *Document) RemapAliasFieldsAndDockey(fieldDescriptions []FieldDescription) error { +// RemapAliasFieldsAndDocID remaps the alias fields and fixes (overwrites) the DocID. +func (doc *Document) RemapAliasFieldsAndDocID(fieldDescriptions []FieldDescription) error { foundAlias, err := doc.remapAliasFields(fieldDescriptions) if err != nil { return err @@ -548,8 +547,8 @@ func (doc *Document) RemapAliasFieldsAndDockey(fieldDescriptions []FieldDescript return nil } - // Update the dockey so dockey isn't based on an aliased name of a field. - return doc.generateAndSetDocKey() + // Update the DocID so DocID isn't based on an aliased name of a field. + return doc.generateAndSetDocID() } // DocumentStatus represent the state of the document in the DAG store. diff --git a/client/document_test.go b/client/document_test.go index 9073373cd3..ee15dc5673 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -48,10 +48,10 @@ func TestNewFromJSON(t *testing.T) { if err != nil { t.Error(err) } - objKey := NewDocKeyV0(c) + objKey := NewDocIDV0(c) - if objKey.String() != doc.Key().String() { - t.Errorf("Incorrect doc key. Want %v, have %v", objKey.String(), doc.Key().String()) + if objKey.String() != doc.ID().String() { + t.Errorf("Incorrect document ID. Want %v, have %v", objKey.String(), doc.ID().String()) return } @@ -101,10 +101,10 @@ func TestSetWithJSON(t *testing.T) { if err != nil { t.Error(err) } - objKey := NewDocKeyV0(c) + objKey := NewDocIDV0(c) - if objKey.String() != doc.Key().String() { - t.Errorf("Incorrect doc key. Want %v, have %v", objKey.String(), doc.Key().String()) + if objKey.String() != doc.ID().String() { + t.Errorf("Incorrect document ID. Want %v, have %v", objKey.String(), doc.ID().String()) return } diff --git a/client/errors.go b/client/errors.go index 28161c502d..3d1de52a3d 100644 --- a/client/errors.go +++ b/client/errors.go @@ -32,23 +32,16 @@ const ( // This list is incomplete and undefined errors may also be returned. // Errors returned from this package may be tested against these errors with errors.Is. 
var ( - ErrFieldNotExist = errors.New(errFieldNotExist) - ErrUnexpectedType = errors.New(errUnexpectedType) - ErrParsingFailed = errors.New(errParsingFailed) - ErrUninitializeProperty = errors.New(errUninitializeProperty) - ErrFieldNotObject = errors.New("trying to access field on a non object type") - ErrValueTypeMismatch = errors.New("value does not match indicated type") - ErrIndexNotFound = errors.New("no index found for given ID") - ErrDocumentNotFound = errors.New("no document for the given key exists") - ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") - ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") - ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") - ErrMalformedDocKey = errors.New("malformed DocKey, missing either version or cid") - ErrInvalidDocKeyVersion = errors.New("invalid DocKey version") - ErrMaxTxnRetries = errors.New(errMaxTxnRetries) - ErrRelationOneSided = errors.New(errRelationOneSided) - ErrCollectionNotFound = errors.New(errCollectionNotFound) - ErrUnknownCRDT = errors.New(errUnknownCRDT) + ErrFieldNotExist = errors.New(errFieldNotExist) + ErrUnexpectedType = errors.New(errUnexpectedType) + ErrFieldNotObject = errors.New("trying to access field on a non object type") + ErrValueTypeMismatch = errors.New("value does not match indicated type") + ErrDocumentNotFound = errors.New("no document for the given ID exists") + ErrInvalidUpdateTarget = errors.New("the target document to update is of invalid type") + ErrInvalidUpdater = errors.New("the updater of a document is of invalid type") + ErrInvalidDeleteTarget = errors.New("the target document to delete is of invalid type") + ErrMalformedDocID = errors.New("malformed document ID, missing either version or cid") + ErrInvalidDocIDVersion = errors.New("invalid document ID version") ) // NewErrFieldNotExist returns an error indicating that the given field does not exist. 
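Since the package comment above states that these sentinel errors may be tested with `errors.Is`, a minimal sketch (hypothetical helper) of matching the renamed values when parsing and fetching by document ID:

```go
package example

import (
	"context"
	"errors"
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// describeGetError is a hypothetical helper matching the renamed sentinel
// errors with errors.Is, as the package comment suggests.
func describeGetError(ctx context.Context, col client.Collection, id string) string {
	docID, err := client.NewDocIDFromString(id)
	switch {
	case errors.Is(err, client.ErrMalformedDocID):
		return fmt.Sprintf("%q is not a well-formed document ID", id)
	case errors.Is(err, client.ErrInvalidDocIDVersion):
		return fmt.Sprintf("%q uses an unsupported document ID version", id)
	case err != nil:
		return err.Error()
	}
	if _, err := col.Get(ctx, docID, false); errors.Is(err, client.ErrDocumentNotFound) {
		return "no document for the given ID exists"
	} else if err != nil {
		return err.Error()
	}
	return "found"
}
```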
diff --git a/client/mocks/collection.go b/client/mocks/collection.go index f3d7f58354..b1fac9c243 100644 --- a/client/mocks/collection.go +++ b/client/mocks/collection.go @@ -206,21 +206,21 @@ func (_c *Collection_Definition_Call) RunAndReturn(run func() client.CollectionD } // Delete provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Delete(_a0 context.Context, _a1 client.DocKey) (bool, error) { +func (_m *Collection) Delete(_a0 context.Context, _a1 client.DocID) (bool, error) { ret := _m.Called(_a0, _a1) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) (bool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) bool); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { r0 = rf(_a0, _a1) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) @@ -236,14 +236,14 @@ type Collection_Delete_Call struct { // Delete is a helper method to define mock.On call // - _a0 context.Context -// - _a1 client.DocKey +// - _a1 client.DocID func (_e *Collection_Expecter) Delete(_a0 interface{}, _a1 interface{}) *Collection_Delete_Call { return &Collection_Delete_Call{Call: _e.mock.On("Delete", _a0, _a1)} } -func (_c *Collection_Delete_Call) Run(run func(_a0 context.Context, _a1 client.DocKey)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Delete_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } @@ -253,7 +253,7 @@ func (_c *Collection_Delete_Call) Return(_a0 bool, _a1 error) *Collection_Delete return _c } -func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client.DocKey) (bool, error)) *Collection_Delete_Call { +func (_c *Collection_Delete_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Delete_Call { _c.Call.Return(run) return _c } @@ -313,25 +313,25 @@ func (_c *Collection_DeleteWith_Call) RunAndReturn(run func(context.Context, int return _c } -// DeleteWithFilter provides a mock function with given fields: ctx, filter -func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { - ret := _m.Called(ctx, filter) +// DeleteWithDocID provides a mock function with given fields: _a0, _a1 +func (_m *Collection) DeleteWithDocID(_a0 context.Context, _a1 client.DocID) (*client.DeleteResult, error) { + ret := _m.Called(_a0, _a1) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { - return rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (*client.DeleteResult, error)); ok { + return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { - r0 = rf(ctx, filter) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) *client.DeleteResult); ok { + r0 = rf(_a0, _a1) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { - r1 = rf(ctx, filter) + if rf, 
ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { + r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) } @@ -339,45 +339,45 @@ func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) return r0, r1 } -// Collection_DeleteWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithFilter' -type Collection_DeleteWithFilter_Call struct { +// Collection_DeleteWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocID' +type Collection_DeleteWithDocID_Call struct { *mock.Call } -// DeleteWithFilter is a helper method to define mock.On call -// - ctx context.Context -// - filter interface{} -func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { - return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, filter)} +// DeleteWithDocID is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 client.DocID +func (_e *Collection_Expecter) DeleteWithDocID(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocID_Call { + return &Collection_DeleteWithDocID_Call{Call: _e.mock.On("DeleteWithDocID", _a0, _a1)} } -func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, filter interface{})) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithDocID_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_DeleteWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{})) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } -func (_c *Collection_DeleteWithFilter_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithDocID_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocID_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { +func (_c *Collection_DeleteWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocID_Call { _c.Call.Return(run) return _c } -// DeleteWithKey provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithKey(_a0 context.Context, _a1 client.DocKey) (*client.DeleteResult, error) { +// DeleteWithDocIDs provides a mock function with given fields: _a0, _a1 +func (_m *Collection) DeleteWithDocIDs(_a0 context.Context, _a1 []client.DocID) (*client.DeleteResult, error) { ret := _m.Called(_a0, _a1) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) (*client.DeleteResult, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) (*client.DeleteResult, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) *client.DeleteResult); ok { + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID) *client.DeleteResult); ok { r0 = rf(_a0, _a1) } else { if ret.Get(0) != nil { @@ -385,7 +385,7 @@ func (_m *Collection) DeleteWithKey(_a0 context.Context, _a1 client.DocKey) (*cl } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, []client.DocID) error); ok { r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) 
@@ -394,54 +394,54 @@ func (_m *Collection) DeleteWithKey(_a0 context.Context, _a1 client.DocKey) (*cl return r0, r1 } -// Collection_DeleteWithKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithKey' -type Collection_DeleteWithKey_Call struct { +// Collection_DeleteWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithDocIDs' +type Collection_DeleteWithDocIDs_Call struct { *mock.Call } -// DeleteWithKey is a helper method to define mock.On call +// DeleteWithDocIDs is a helper method to define mock.On call // - _a0 context.Context -// - _a1 client.DocKey -func (_e *Collection_Expecter) DeleteWithKey(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithKey_Call { - return &Collection_DeleteWithKey_Call{Call: _e.mock.On("DeleteWithKey", _a0, _a1)} +// - _a1 []client.DocID +func (_e *Collection_Expecter) DeleteWithDocIDs(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithDocIDs_Call { + return &Collection_DeleteWithDocIDs_Call{Call: _e.mock.On("DeleteWithDocIDs", _a0, _a1)} } -func (_c *Collection_DeleteWithKey_Call) Run(run func(_a0 context.Context, _a1 client.DocKey)) *Collection_DeleteWithKey_Call { +func (_c *Collection_DeleteWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID)) *Collection_DeleteWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey)) + run(args[0].(context.Context), args[1].([]client.DocID)) }) return _c } -func (_c *Collection_DeleteWithKey_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithKey_Call { +func (_c *Collection_DeleteWithDocIDs_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithDocIDs_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_DeleteWithKey_Call) RunAndReturn(run func(context.Context, client.DocKey) (*client.DeleteResult, error)) *Collection_DeleteWithKey_Call { +func (_c *Collection_DeleteWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID) (*client.DeleteResult, error)) *Collection_DeleteWithDocIDs_Call { _c.Call.Return(run) return _c } -// DeleteWithKeys provides a mock function with given fields: _a0, _a1 -func (_m *Collection) DeleteWithKeys(_a0 context.Context, _a1 []client.DocKey) (*client.DeleteResult, error) { - ret := _m.Called(_a0, _a1) +// DeleteWithFilter provides a mock function with given fields: ctx, filter +func (_m *Collection) DeleteWithFilter(ctx context.Context, filter interface{}) (*client.DeleteResult, error) { + ret := _m.Called(ctx, filter) var r0 *client.DeleteResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey) (*client.DeleteResult, error)); ok { - return rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) (*client.DeleteResult, error)); ok { + return rf(ctx, filter) } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey) *client.DeleteResult); ok { - r0 = rf(_a0, _a1) + if rf, ok := ret.Get(0).(func(context.Context, interface{}) *client.DeleteResult); ok { + r0 = rf(ctx, filter) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.DeleteResult) } } - if rf, ok := ret.Get(1).(func(context.Context, []client.DocKey) error); ok { - r1 = rf(_a0, _a1) + if rf, ok := ret.Get(1).(func(context.Context, interface{}) error); ok { + r1 = rf(ctx, filter) } else { r1 = ret.Error(1) } @@ -449,31 +449,31 @@ func (_m *Collection) DeleteWithKeys(_a0 context.Context, _a1 []client.DocKey) ( return 
r0, r1 } -// Collection_DeleteWithKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithKeys' -type Collection_DeleteWithKeys_Call struct { +// Collection_DeleteWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DeleteWithFilter' +type Collection_DeleteWithFilter_Call struct { *mock.Call } -// DeleteWithKeys is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocKey -func (_e *Collection_Expecter) DeleteWithKeys(_a0 interface{}, _a1 interface{}) *Collection_DeleteWithKeys_Call { - return &Collection_DeleteWithKeys_Call{Call: _e.mock.On("DeleteWithKeys", _a0, _a1)} +// DeleteWithFilter is a helper method to define mock.On call +// - ctx context.Context +// - filter interface{} +func (_e *Collection_Expecter) DeleteWithFilter(ctx interface{}, filter interface{}) *Collection_DeleteWithFilter_Call { + return &Collection_DeleteWithFilter_Call{Call: _e.mock.On("DeleteWithFilter", ctx, filter)} } -func (_c *Collection_DeleteWithKeys_Call) Run(run func(_a0 context.Context, _a1 []client.DocKey)) *Collection_DeleteWithKeys_Call { +func (_c *Collection_DeleteWithFilter_Call) Run(run func(ctx context.Context, filter interface{})) *Collection_DeleteWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocKey)) + run(args[0].(context.Context), args[1].(interface{})) }) return _c } -func (_c *Collection_DeleteWithKeys_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithKeys_Call { +func (_c *Collection_DeleteWithFilter_Call) Return(_a0 *client.DeleteResult, _a1 error) *Collection_DeleteWithFilter_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_DeleteWithKeys_Call) RunAndReturn(run func(context.Context, []client.DocKey) (*client.DeleteResult, error)) *Collection_DeleteWithKeys_Call { +func (_c *Collection_DeleteWithFilter_Call) RunAndReturn(run func(context.Context, interface{}) (*client.DeleteResult, error)) *Collection_DeleteWithFilter_Call { _c.Call.Return(run) return _c } @@ -563,21 +563,21 @@ func (_c *Collection_DropIndex_Call) RunAndReturn(run func(context.Context, stri } // Exists provides a mock function with given fields: _a0, _a1 -func (_m *Collection) Exists(_a0 context.Context, _a1 client.DocKey) (bool, error) { +func (_m *Collection) Exists(_a0 context.Context, _a1 client.DocID) (bool, error) { ret := _m.Called(_a0, _a1) var r0 bool var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) (bool, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) (bool, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey) bool); ok { + if rf, ok := ret.Get(0).(func(context.Context, client.DocID) bool); ok { r0 = rf(_a0, _a1) } else { r0 = ret.Get(0).(bool) } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, client.DocID) error); ok { r1 = rf(_a0, _a1) } else { r1 = ret.Error(1) @@ -593,14 +593,14 @@ type Collection_Exists_Call struct { // Exists is a helper method to define mock.On call // - _a0 context.Context -// - _a1 client.DocKey +// - _a1 client.DocID func (_e *Collection_Expecter) Exists(_a0 interface{}, _a1 interface{}) *Collection_Exists_Call { return &Collection_Exists_Call{Call: _e.mock.On("Exists", _a0, _a1)} } -func (_c *Collection_Exists_Call) Run(run func(_a0 context.Context, _a1 client.DocKey)) 
*Collection_Exists_Call { +func (_c *Collection_Exists_Call) Run(run func(_a0 context.Context, _a1 client.DocID)) *Collection_Exists_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey)) + run(args[0].(context.Context), args[1].(client.DocID)) }) return _c } @@ -610,30 +610,30 @@ func (_c *Collection_Exists_Call) Return(_a0 bool, _a1 error) *Collection_Exists return _c } -func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, client.DocKey) (bool, error)) *Collection_Exists_Call { +func (_c *Collection_Exists_Call) RunAndReturn(run func(context.Context, client.DocID) (bool, error)) *Collection_Exists_Call { _c.Call.Return(run) return _c } -// Get provides a mock function with given fields: ctx, key, showDeleted -func (_m *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { - ret := _m.Called(ctx, key, showDeleted) +// Get provides a mock function with given fields: ctx, docID, showDeleted +func (_m *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { + ret := _m.Called(ctx, docID, showDeleted) var r0 *client.Document var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, bool) (*client.Document, error)); ok { - return rf(ctx, key, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) (*client.Document, error)); ok { + return rf(ctx, docID, showDeleted) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, bool) *client.Document); ok { - r0 = rf(ctx, key, showDeleted) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, bool) *client.Document); ok { + r0 = rf(ctx, docID, showDeleted) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.Document) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey, bool) error); ok { - r1 = rf(ctx, key, showDeleted) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID, bool) error); ok { + r1 = rf(ctx, docID, showDeleted) } else { r1 = ret.Error(1) } @@ -648,15 +648,15 @@ type Collection_Get_Call struct { // Get is a helper method to define mock.On call // - ctx context.Context -// - key client.DocKey +// - docID client.DocID // - showDeleted bool -func (_e *Collection_Expecter) Get(ctx interface{}, key interface{}, showDeleted interface{}) *Collection_Get_Call { - return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, key, showDeleted)} +func (_e *Collection_Expecter) Get(ctx interface{}, docID interface{}, showDeleted interface{}) *Collection_Get_Call { + return &Collection_Get_Call{Call: _e.mock.On("Get", ctx, docID, showDeleted)} } -func (_c *Collection_Get_Call) Run(run func(ctx context.Context, key client.DocKey, showDeleted bool)) *Collection_Get_Call { +func (_c *Collection_Get_Call) Run(run func(ctx context.Context, docID client.DocID, showDeleted bool)) *Collection_Get_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey), args[2].(bool)) + run(args[0].(context.Context), args[1].(client.DocID), args[2].(bool)) }) return _c } @@ -666,25 +666,25 @@ func (_c *Collection_Get_Call) Return(_a0 *client.Document, _a1 error) *Collecti return _c } -func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, client.DocKey, bool) (*client.Document, error)) *Collection_Get_Call { +func (_c *Collection_Get_Call) RunAndReturn(run func(context.Context, client.DocID, bool) (*client.Document, error)) *Collection_Get_Call { 
_c.Call.Return(run) return _c } -// GetAllDocKeys provides a mock function with given fields: ctx -func (_m *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +// GetAllDocIDs provides a mock function with given fields: ctx +func (_m *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { ret := _m.Called(ctx) - var r0 <-chan client.DocKeysResult + var r0 <-chan client.DocIDResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context) (<-chan client.DocKeysResult, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context) (<-chan client.DocIDResult, error)); ok { return rf(ctx) } - if rf, ok := ret.Get(0).(func(context.Context) <-chan client.DocKeysResult); ok { + if rf, ok := ret.Get(0).(func(context.Context) <-chan client.DocIDResult); ok { r0 = rf(ctx) } else { if ret.Get(0) != nil { - r0 = ret.Get(0).(<-chan client.DocKeysResult) + r0 = ret.Get(0).(<-chan client.DocIDResult) } } @@ -697,30 +697,30 @@ func (_m *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysR return r0, r1 } -// Collection_GetAllDocKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllDocKeys' -type Collection_GetAllDocKeys_Call struct { +// Collection_GetAllDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllDocIDs' +type Collection_GetAllDocIDs_Call struct { *mock.Call } -// GetAllDocKeys is a helper method to define mock.On call +// GetAllDocIDs is a helper method to define mock.On call // - ctx context.Context -func (_e *Collection_Expecter) GetAllDocKeys(ctx interface{}) *Collection_GetAllDocKeys_Call { - return &Collection_GetAllDocKeys_Call{Call: _e.mock.On("GetAllDocKeys", ctx)} +func (_e *Collection_Expecter) GetAllDocIDs(ctx interface{}) *Collection_GetAllDocIDs_Call { + return &Collection_GetAllDocIDs_Call{Call: _e.mock.On("GetAllDocIDs", ctx)} } -func (_c *Collection_GetAllDocKeys_Call) Run(run func(ctx context.Context)) *Collection_GetAllDocKeys_Call { +func (_c *Collection_GetAllDocIDs_Call) Run(run func(ctx context.Context)) *Collection_GetAllDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context)) }) return _c } -func (_c *Collection_GetAllDocKeys_Call) Return(_a0 <-chan client.DocKeysResult, _a1 error) *Collection_GetAllDocKeys_Call { +func (_c *Collection_GetAllDocIDs_Call) Return(_a0 <-chan client.DocIDResult, _a1 error) *Collection_GetAllDocIDs_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_GetAllDocKeys_Call) RunAndReturn(run func(context.Context) (<-chan client.DocKeysResult, error)) *Collection_GetAllDocKeys_Call { +func (_c *Collection_GetAllDocIDs_Call) RunAndReturn(run func(context.Context) (<-chan client.DocIDResult, error)) *Collection_GetAllDocIDs_Call { _c.Call.Return(run) return _c } @@ -1085,25 +1085,25 @@ func (_c *Collection_UpdateWith_Call) RunAndReturn(run func(context.Context, int return _c } -// UpdateWithFilter provides a mock function with given fields: ctx, filter, updater -func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, filter, updater) +// UpdateWithDocID provides a mock function with given fields: ctx, docID, updater +func (_m *Collection) UpdateWithDocID(ctx context.Context, docID client.DocID, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, docID, updater) var r0 *client.UpdateResult var r1 error - if rf, ok 
:= ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { - return rf(ctx, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(ctx, docID, updater) } - if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { - r0 = rf(ctx, filter, updater) + if rf, ok := ret.Get(0).(func(context.Context, client.DocID, string) *client.UpdateResult); ok { + r0 = rf(ctx, docID, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { - r1 = rf(ctx, filter, updater) + if rf, ok := ret.Get(1).(func(context.Context, client.DocID, string) error); ok { + r1 = rf(ctx, docID, updater) } else { r1 = ret.Error(1) } @@ -1111,55 +1111,55 @@ func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, return r0, r1 } -// Collection_UpdateWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithFilter' -type Collection_UpdateWithFilter_Call struct { +// Collection_UpdateWithDocID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocID' +type Collection_UpdateWithDocID_Call struct { *mock.Call } -// UpdateWithFilter is a helper method to define mock.On call +// UpdateWithDocID is a helper method to define mock.On call // - ctx context.Context -// - filter interface{} +// - docID client.DocID // - updater string -func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { - return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, filter, updater)} +func (_e *Collection_Expecter) UpdateWithDocID(ctx interface{}, docID interface{}, updater interface{}) *Collection_UpdateWithDocID_Call { + return &Collection_UpdateWithDocID_Call{Call: _e.mock.On("UpdateWithDocID", ctx, docID, updater)} } -func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithDocID_Call) Run(run func(ctx context.Context, docID client.DocID, updater string)) *Collection_UpdateWithDocID_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) + run(args[0].(context.Context), args[1].(client.DocID), args[2].(string)) }) return _c } -func (_c *Collection_UpdateWithFilter_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithDocID_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocID_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { +func (_c *Collection_UpdateWithDocID_Call) RunAndReturn(run func(context.Context, client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocID_Call { _c.Call.Return(run) return _c } -// UpdateWithKey provides a mock function with given fields: ctx, key, updater -func (_m *Collection) UpdateWithKey(ctx context.Context, key client.DocKey, updater string) (*client.UpdateResult, error) { - ret := _m.Called(ctx, key, updater) +// UpdateWithDocIDs provides a mock function with given 
fields: _a0, _a1, _a2 +func (_m *Collection) UpdateWithDocIDs(_a0 context.Context, _a1 []client.DocID, _a2 string) (*client.UpdateResult, error) { + ret := _m.Called(_a0, _a1, _a2) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, string) (*client.UpdateResult, error)); ok { - return rf(ctx, key, updater) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) (*client.UpdateResult, error)); ok { + return rf(_a0, _a1, _a2) } - if rf, ok := ret.Get(0).(func(context.Context, client.DocKey, string) *client.UpdateResult); ok { - r0 = rf(ctx, key, updater) + if rf, ok := ret.Get(0).(func(context.Context, []client.DocID, string) *client.UpdateResult); ok { + r0 = rf(_a0, _a1, _a2) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, client.DocKey, string) error); ok { - r1 = rf(ctx, key, updater) + if rf, ok := ret.Get(1).(func(context.Context, []client.DocID, string) error); ok { + r1 = rf(_a0, _a1, _a2) } else { r1 = ret.Error(1) } @@ -1167,55 +1167,55 @@ func (_m *Collection) UpdateWithKey(ctx context.Context, key client.DocKey, upda return r0, r1 } -// Collection_UpdateWithKey_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithKey' -type Collection_UpdateWithKey_Call struct { +// Collection_UpdateWithDocIDs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithDocIDs' +type Collection_UpdateWithDocIDs_Call struct { *mock.Call } -// UpdateWithKey is a helper method to define mock.On call -// - ctx context.Context -// - key client.DocKey -// - updater string -func (_e *Collection_Expecter) UpdateWithKey(ctx interface{}, key interface{}, updater interface{}) *Collection_UpdateWithKey_Call { - return &Collection_UpdateWithKey_Call{Call: _e.mock.On("UpdateWithKey", ctx, key, updater)} +// UpdateWithDocIDs is a helper method to define mock.On call +// - _a0 context.Context +// - _a1 []client.DocID +// - _a2 string +func (_e *Collection_Expecter) UpdateWithDocIDs(_a0 interface{}, _a1 interface{}, _a2 interface{}) *Collection_UpdateWithDocIDs_Call { + return &Collection_UpdateWithDocIDs_Call{Call: _e.mock.On("UpdateWithDocIDs", _a0, _a1, _a2)} } -func (_c *Collection_UpdateWithKey_Call) Run(run func(ctx context.Context, key client.DocKey, updater string)) *Collection_UpdateWithKey_Call { +func (_c *Collection_UpdateWithDocIDs_Call) Run(run func(_a0 context.Context, _a1 []client.DocID, _a2 string)) *Collection_UpdateWithDocIDs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(client.DocKey), args[2].(string)) + run(args[0].(context.Context), args[1].([]client.DocID), args[2].(string)) }) return _c } -func (_c *Collection_UpdateWithKey_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithKey_Call { +func (_c *Collection_UpdateWithDocIDs_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithDocIDs_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_UpdateWithKey_Call) RunAndReturn(run func(context.Context, client.DocKey, string) (*client.UpdateResult, error)) *Collection_UpdateWithKey_Call { +func (_c *Collection_UpdateWithDocIDs_Call) RunAndReturn(run func(context.Context, []client.DocID, string) (*client.UpdateResult, error)) *Collection_UpdateWithDocIDs_Call { _c.Call.Return(run) return _c } -// UpdateWithKeys provides a mock function with given fields: _a0, _a1, _a2 -func 
(_m *Collection) UpdateWithKeys(_a0 context.Context, _a1 []client.DocKey, _a2 string) (*client.UpdateResult, error) { - ret := _m.Called(_a0, _a1, _a2) +// UpdateWithFilter provides a mock function with given fields: ctx, filter, updater +func (_m *Collection) UpdateWithFilter(ctx context.Context, filter interface{}, updater string) (*client.UpdateResult, error) { + ret := _m.Called(ctx, filter, updater) var r0 *client.UpdateResult var r1 error - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey, string) (*client.UpdateResult, error)); ok { - return rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) (*client.UpdateResult, error)); ok { + return rf(ctx, filter, updater) } - if rf, ok := ret.Get(0).(func(context.Context, []client.DocKey, string) *client.UpdateResult); ok { - r0 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(0).(func(context.Context, interface{}, string) *client.UpdateResult); ok { + r0 = rf(ctx, filter, updater) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*client.UpdateResult) } } - if rf, ok := ret.Get(1).(func(context.Context, []client.DocKey, string) error); ok { - r1 = rf(_a0, _a1, _a2) + if rf, ok := ret.Get(1).(func(context.Context, interface{}, string) error); ok { + r1 = rf(ctx, filter, updater) } else { r1 = ret.Error(1) } @@ -1223,32 +1223,32 @@ func (_m *Collection) UpdateWithKeys(_a0 context.Context, _a1 []client.DocKey, _ return r0, r1 } -// Collection_UpdateWithKeys_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithKeys' -type Collection_UpdateWithKeys_Call struct { +// Collection_UpdateWithFilter_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateWithFilter' +type Collection_UpdateWithFilter_Call struct { *mock.Call } -// UpdateWithKeys is a helper method to define mock.On call -// - _a0 context.Context -// - _a1 []client.DocKey -// - _a2 string -func (_e *Collection_Expecter) UpdateWithKeys(_a0 interface{}, _a1 interface{}, _a2 interface{}) *Collection_UpdateWithKeys_Call { - return &Collection_UpdateWithKeys_Call{Call: _e.mock.On("UpdateWithKeys", _a0, _a1, _a2)} +// UpdateWithFilter is a helper method to define mock.On call +// - ctx context.Context +// - filter interface{} +// - updater string +func (_e *Collection_Expecter) UpdateWithFilter(ctx interface{}, filter interface{}, updater interface{}) *Collection_UpdateWithFilter_Call { + return &Collection_UpdateWithFilter_Call{Call: _e.mock.On("UpdateWithFilter", ctx, filter, updater)} } -func (_c *Collection_UpdateWithKeys_Call) Run(run func(_a0 context.Context, _a1 []client.DocKey, _a2 string)) *Collection_UpdateWithKeys_Call { +func (_c *Collection_UpdateWithFilter_Call) Run(run func(ctx context.Context, filter interface{}, updater string)) *Collection_UpdateWithFilter_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].([]client.DocKey), args[2].(string)) + run(args[0].(context.Context), args[1].(interface{}), args[2].(string)) }) return _c } -func (_c *Collection_UpdateWithKeys_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithKeys_Call { +func (_c *Collection_UpdateWithFilter_Call) Return(_a0 *client.UpdateResult, _a1 error) *Collection_UpdateWithFilter_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *Collection_UpdateWithKeys_Call) RunAndReturn(run func(context.Context, []client.DocKey, string) (*client.UpdateResult, error)) *Collection_UpdateWithKeys_Call { +func (_c 
*Collection_UpdateWithFilter_Call) RunAndReturn(run func(context.Context, interface{}, string) (*client.UpdateResult, error)) *Collection_UpdateWithFilter_Call { _c.Call.Return(run) return _c } diff --git a/client/mocks/db.go b/client/mocks/db.go index df7b53fb5a..90dc8986d0 100644 --- a/client/mocks/db.go +++ b/client/mocks/db.go @@ -83,6 +83,62 @@ func (_c *DB_AddSchema_Call) RunAndReturn(run func(context.Context, string) ([]c return _c } +// AddView provides a mock function with given fields: ctx, gqlQuery, sdl +func (_m *DB) AddView(ctx context.Context, gqlQuery string, sdl string) ([]client.CollectionDefinition, error) { + ret := _m.Called(ctx, gqlQuery, sdl) + + var r0 []client.CollectionDefinition + var r1 error + if rf, ok := ret.Get(0).(func(context.Context, string, string) ([]client.CollectionDefinition, error)); ok { + return rf(ctx, gqlQuery, sdl) + } + if rf, ok := ret.Get(0).(func(context.Context, string, string) []client.CollectionDefinition); ok { + r0 = rf(ctx, gqlQuery, sdl) + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).([]client.CollectionDefinition) + } + } + + if rf, ok := ret.Get(1).(func(context.Context, string, string) error); ok { + r1 = rf(ctx, gqlQuery, sdl) + } else { + r1 = ret.Error(1) + } + + return r0, r1 +} + +// DB_AddView_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddView' +type DB_AddView_Call struct { + *mock.Call +} + +// AddView is a helper method to define mock.On call +// - ctx context.Context +// - gqlQuery string +// - sdl string +func (_e *DB_Expecter) AddView(ctx interface{}, gqlQuery interface{}, sdl interface{}) *DB_AddView_Call { + return &DB_AddView_Call{Call: _e.mock.On("AddView", ctx, gqlQuery, sdl)} +} + +func (_c *DB_AddView_Call) Run(run func(ctx context.Context, gqlQuery string, sdl string)) *DB_AddView_Call { + _c.Call.Run(func(args mock.Arguments) { + run(args[0].(context.Context), args[1].(string), args[2].(string)) + }) + return _c +} + +func (_c *DB_AddView_Call) Return(_a0 []client.CollectionDefinition, _a1 error) *DB_AddView_Call { + _c.Call.Return(_a0, _a1) + return _c +} + +func (_c *DB_AddView_Call) RunAndReturn(run func(context.Context, string, string) ([]client.CollectionDefinition, error)) *DB_AddView_Call { + _c.Call.Return(run) + return _c +} + // BasicExport provides a mock function with given fields: ctx, config func (_m *DB) BasicExport(ctx context.Context, config *client.BackupConfig) error { ret := _m.Called(ctx, config) @@ -464,30 +520,30 @@ func (_m *DB) GetAllSchemas(_a0 context.Context) ([]client.SchemaDescription, er return r0, r1 } -// DB_GetAllSchema_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllSchemas' -type DB_GetAllSchema_Call struct { +// DB_GetAllSchemas_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetAllSchemas' +type DB_GetAllSchemas_Call struct { *mock.Call } // GetAllSchemas is a helper method to define mock.On call // - _a0 context.Context -func (_e *DB_Expecter) GetAllSchemas(_a0 interface{}) *DB_GetAllSchema_Call { - return &DB_GetAllSchema_Call{Call: _e.mock.On("GetAllSchemas", _a0)} +func (_e *DB_Expecter) GetAllSchemas(_a0 interface{}) *DB_GetAllSchemas_Call { + return &DB_GetAllSchemas_Call{Call: _e.mock.On("GetAllSchemas", _a0)} } -func (_c *DB_GetAllSchema_Call) Run(run func(_a0 context.Context)) *DB_GetAllSchema_Call { +func (_c *DB_GetAllSchemas_Call) Run(run func(_a0 context.Context)) *DB_GetAllSchemas_Call { _c.Call.Run(func(args 
mock.Arguments) { run(args[0].(context.Context)) }) return _c } -func (_c *DB_GetAllSchema_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetAllSchema_Call { +func (_c *DB_GetAllSchemas_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetAllSchemas_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetAllSchema_Call) RunAndReturn(run func(context.Context) ([]client.SchemaDescription, error)) *DB_GetAllSchema_Call { +func (_c *DB_GetAllSchemas_Call) RunAndReturn(run func(context.Context) ([]client.SchemaDescription, error)) *DB_GetAllSchemas_Call { _c.Call.Return(run) return _c } @@ -657,21 +713,19 @@ func (_c *DB_GetCollectionsByVersionID_Call) RunAndReturn(run func(context.Conte return _c } -// GetSchemasByName provides a mock function with given fields: _a0, _a1 -func (_m *DB) GetSchemasByName(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { +// GetSchemaByVersionID provides a mock function with given fields: _a0, _a1 +func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.SchemaDescription, error) { ret := _m.Called(_a0, _a1) - var r0 []client.SchemaDescription + var r0 client.SchemaDescription var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) ([]client.SchemaDescription, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) (client.SchemaDescription, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, string) []client.SchemaDescription); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) client.SchemaDescription); ok { r0 = rf(_a0, _a1) } else { - if ret.Get(0) != nil { - r0 = ret.Get(0).([]client.SchemaDescription) - } + r0 = ret.Get(0).(client.SchemaDescription) } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { @@ -683,37 +737,37 @@ func (_m *DB) GetSchemasByName(_a0 context.Context, _a1 string) ([]client.Schema return r0, r1 } -// DB_GetSchemaByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByName' -type DB_GetSchemaByName_Call struct { +// DB_GetSchemaByVersionID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemaByVersionID' +type DB_GetSchemaByVersionID_Call struct { *mock.Call } -// GetSchemasByName is a helper method to define mock.On call +// GetSchemaByVersionID is a helper method to define mock.On call // - _a0 context.Context // - _a1 string -func (_e *DB_Expecter) GetSchemasByName(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByName_Call { - return &DB_GetSchemaByName_Call{Call: _e.mock.On("GetSchemasByName", _a0, _a1)} +func (_e *DB_Expecter) GetSchemaByVersionID(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByVersionID_Call { + return &DB_GetSchemaByVersionID_Call{Call: _e.mock.On("GetSchemaByVersionID", _a0, _a1)} } -func (_c *DB_GetSchemaByName_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByName_Call { +func (_c *DB_GetSchemaByVersionID_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByVersionID_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) return _c } -func (_c *DB_GetSchemaByName_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemaByName_Call { +func (_c *DB_GetSchemaByVersionID_Call) Return(_a0 client.SchemaDescription, _a1 error) *DB_GetSchemaByVersionID_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetSchemaByName_Call) RunAndReturn(run func(context.Context, 
string) ([]client.SchemaDescription, error)) *DB_GetSchemaByName_Call { +func (_c *DB_GetSchemaByVersionID_Call) RunAndReturn(run func(context.Context, string) (client.SchemaDescription, error)) *DB_GetSchemaByVersionID_Call { _c.Call.Return(run) return _c } -// GetSchemasByRoot provides a mock function with given fields: _a0, _a1 -func (_m *DB) GetSchemasByRoot(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { +// GetSchemasByName provides a mock function with given fields: _a0, _a1 +func (_m *DB) GetSchemasByName(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { ret := _m.Called(_a0, _a1) var r0 []client.SchemaDescription @@ -738,48 +792,50 @@ func (_m *DB) GetSchemasByRoot(_a0 context.Context, _a1 string) ([]client.Schema return r0, r1 } -// DB_GetSchemaByRoot_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByRoot' -type DB_GetSchemaByRoot_Call struct { +// DB_GetSchemasByName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByName' +type DB_GetSchemasByName_Call struct { *mock.Call } -// GetSchemasByRoot is a helper method to define mock.On call +// GetSchemasByName is a helper method to define mock.On call // - _a0 context.Context // - _a1 string -func (_e *DB_Expecter) GetSchemasByRoot(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByRoot_Call { - return &DB_GetSchemaByRoot_Call{Call: _e.mock.On("GetSchemasByRoot", _a0, _a1)} +func (_e *DB_Expecter) GetSchemasByName(_a0 interface{}, _a1 interface{}) *DB_GetSchemasByName_Call { + return &DB_GetSchemasByName_Call{Call: _e.mock.On("GetSchemasByName", _a0, _a1)} } -func (_c *DB_GetSchemaByRoot_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByRoot_Call { +func (_c *DB_GetSchemasByName_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemasByName_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) return _c } -func (_c *DB_GetSchemaByRoot_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemaByRoot_Call { +func (_c *DB_GetSchemasByName_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemasByName_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetSchemaByRoot_Call) RunAndReturn(run func(context.Context, string) ([]client.SchemaDescription, error)) *DB_GetSchemaByRoot_Call { +func (_c *DB_GetSchemasByName_Call) RunAndReturn(run func(context.Context, string) ([]client.SchemaDescription, error)) *DB_GetSchemasByName_Call { _c.Call.Return(run) return _c } -// GetSchemaByVersionID provides a mock function with given fields: _a0, _a1 -func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.SchemaDescription, error) { +// GetSchemasByRoot provides a mock function with given fields: _a0, _a1 +func (_m *DB) GetSchemasByRoot(_a0 context.Context, _a1 string) ([]client.SchemaDescription, error) { ret := _m.Called(_a0, _a1) - var r0 client.SchemaDescription + var r0 []client.SchemaDescription var r1 error - if rf, ok := ret.Get(0).(func(context.Context, string) (client.SchemaDescription, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) ([]client.SchemaDescription, error)); ok { return rf(_a0, _a1) } - if rf, ok := ret.Get(0).(func(context.Context, string) client.SchemaDescription); ok { + if rf, ok := ret.Get(0).(func(context.Context, string) []client.SchemaDescription); ok { r0 = rf(_a0, _a1) } else { - r0 = ret.Get(0).(client.SchemaDescription) + 
if ret.Get(0) != nil { + r0 = ret.Get(0).([]client.SchemaDescription) + } } if rf, ok := ret.Get(1).(func(context.Context, string) error); ok { @@ -791,31 +847,31 @@ func (_m *DB) GetSchemaByVersionID(_a0 context.Context, _a1 string) (client.Sche return r0, r1 } -// DB_GetSchemaByVersionID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemaByVersionID' -type DB_GetSchemaByVersionID_Call struct { +// DB_GetSchemasByRoot_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetSchemasByRoot' +type DB_GetSchemasByRoot_Call struct { *mock.Call } -// GetSchemaByVersionID is a helper method to define mock.On call +// GetSchemasByRoot is a helper method to define mock.On call // - _a0 context.Context // - _a1 string -func (_e *DB_Expecter) GetSchemaByVersionID(_a0 interface{}, _a1 interface{}) *DB_GetSchemaByVersionID_Call { - return &DB_GetSchemaByVersionID_Call{Call: _e.mock.On("GetSchemaByVersionID", _a0, _a1)} +func (_e *DB_Expecter) GetSchemasByRoot(_a0 interface{}, _a1 interface{}) *DB_GetSchemasByRoot_Call { + return &DB_GetSchemasByRoot_Call{Call: _e.mock.On("GetSchemasByRoot", _a0, _a1)} } -func (_c *DB_GetSchemaByVersionID_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemaByVersionID_Call { +func (_c *DB_GetSchemasByRoot_Call) Run(run func(_a0 context.Context, _a1 string)) *DB_GetSchemasByRoot_Call { _c.Call.Run(func(args mock.Arguments) { run(args[0].(context.Context), args[1].(string)) }) return _c } -func (_c *DB_GetSchemaByVersionID_Call) Return(_a0 client.SchemaDescription, _a1 error) *DB_GetSchemaByVersionID_Call { +func (_c *DB_GetSchemasByRoot_Call) Return(_a0 []client.SchemaDescription, _a1 error) *DB_GetSchemasByRoot_Call { _c.Call.Return(_a0, _a1) return _c } -func (_c *DB_GetSchemaByVersionID_Call) RunAndReturn(run func(context.Context, string) (client.SchemaDescription, error)) *DB_GetSchemaByVersionID_Call { +func (_c *DB_GetSchemasByRoot_Call) RunAndReturn(run func(context.Context, string) ([]client.SchemaDescription, error)) *DB_GetSchemasByRoot_Call { _c.Call.Return(run) return _c } diff --git a/client/request/commit.go b/client/request/commit.go index 0715276547..ff65e20822 100644 --- a/client/request/commit.go +++ b/client/request/commit.go @@ -19,7 +19,7 @@ var ( type CommitSelect struct { Field - DocKey immutable.Option[string] + DocID immutable.Option[string] FieldID immutable.Option[string] Cid immutable.Option[string] Depth immutable.Option[uint64] diff --git a/client/request/consts.go b/client/request/consts.go index 7287a49ac3..85b7d63d84 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -21,12 +21,8 @@ const ( Cid = "cid" Data = "data" - DocKey = "dockey" - DocKeys = "dockeys" FieldName = "field" FieldIDName = "fieldId" - Id = "id" - Ids = "ids" ShowDeleted = "showDeleted" FilterClause = "filter" @@ -36,14 +32,21 @@ const ( OrderClause = "order" DepthClause = "depth" + DocIDArgName = "docID" + DocIDsArgName = "docIDs" + AverageFieldName = "_avg" CountFieldName = "_count" - KeyFieldName = "_key" + DocIDFieldName = "_docID" GroupFieldName = "_group" DeletedFieldName = "_deleted" SumFieldName = "_sum" VersionFieldName = "_version" + // New generated document id from a backed up document, + // which might have a different _docID originally. 
+ NewDocIDFieldName = "_docIDNew" + ExplainLabel = "explain" LatestCommitsName = "latestCommits" @@ -53,13 +56,18 @@ const ( LinksFieldName = "links" HeightFieldName = "height" CidFieldName = "cid" - DockeyFieldName = "dockey" CollectionIDFieldName = "collectionID" SchemaVersionIDFieldName = "schemaVersionId" FieldNameFieldName = "fieldName" FieldIDFieldName = "fieldId" DeltaFieldName = "delta" + DeltaArgFieldName = "FieldName" + DeltaArgData = "Data" + DeltaArgSchemaVersionID = "SchemaVersionID" + DeltaArgPriority = "Priority" + DeltaArgDocID = "DocID" + LinksNameFieldName = "name" LinksCidFieldName = "cid" @@ -80,7 +88,7 @@ var ( CountFieldName: true, SumFieldName: true, AverageFieldName: true, - KeyFieldName: true, + DocIDFieldName: true, DeletedFieldName: true, } @@ -98,7 +106,7 @@ var ( VersionFields = []string{ HeightFieldName, CidFieldName, - DockeyFieldName, + DocIDArgName, CollectionIDFieldName, SchemaVersionIDFieldName, FieldNameFieldName, diff --git a/client/request/mutation.go b/client/request/mutation.go index c7f0e07ee8..3d19210458 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -46,8 +46,8 @@ func (m ObjectMutation) ToSelect() *Select { Name: m.Collection, Alias: m.Alias, }, - Fields: m.Fields, - DocKeys: m.IDs, - Filter: m.Filter, + Fields: m.Fields, + DocIDs: m.IDs, + Filter: m.Filter, } } diff --git a/client/request/select.go b/client/request/select.go index f7d1517dec..863bba2aeb 100644 --- a/client/request/select.go +++ b/client/request/select.go @@ -30,8 +30,8 @@ const ( type Select struct { Field - DocKeys immutable.Option[[]string] - CID immutable.Option[string] + DocIDs immutable.Option[[]string] + CID immutable.Option[string] // Root is the top level type of parsed request Root SelectionType @@ -114,7 +114,7 @@ func (s *Select) validateGroupBy() []error { // of `Select` objects. type selectJson struct { Field - DocKeys immutable.Option[[]string] + DocIDs immutable.Option[[]string] CID immutable.Option[string] Root SelectionType Limit immutable.Option[uint64] @@ -140,7 +140,7 @@ func (s *Select) UnmarshalJSON(bytes []byte) error { } s.Field = selectMap.Field - s.DocKeys = selectMap.DocKeys + s.DocIDs = selectMap.DocIDs s.CID = selectMap.CID s.Root = selectMap.Root s.Limit = selectMap.Limit diff --git a/client/request/subscription.go b/client/request/subscription.go index c788efbb4c..bb4e01156c 100644 --- a/client/request/subscription.go +++ b/client/request/subscription.go @@ -30,15 +30,15 @@ type ObjectSubscription struct { // ToSelect returns a basic Select object, with the same Name, Alias, and Fields as // the Subscription object. Used to create a Select planNode for the event stream return objects. 
-func (m ObjectSubscription) ToSelect(docKey, cid string) *Select { +func (m ObjectSubscription) ToSelect(docID, cid string) *Select { return &Select{ Field: Field{ Name: m.Collection, Alias: m.Alias, }, - DocKeys: immutable.Some([]string{docKey}), - CID: immutable.Some(cid), - Fields: m.Fields, - Filter: m.Filter, + DocIDs: immutable.Some([]string{docID}), + CID: immutable.Some(cid), + Fields: m.Fields, + Filter: m.Filter, } } diff --git a/core/crdt/base_test.go b/core/crdt/base_test.go index e69d69f05e..c3db4af3d6 100644 --- a/core/crdt/base_test.go +++ b/core/crdt/base_test.go @@ -41,7 +41,7 @@ func TestBaseCRDTNew(t *testing.T) { func TestBaseCRDTvalueKey(t *testing.T) { base := exampleBaseCRDT() - vk := base.key.WithDocKey("mykey").WithValueFlag() + vk := base.key.WithDocID("mykey").WithValueFlag() if vk.ToString() != "/v/mykey" { t.Errorf("Incorrect valueKey. Have %v, want %v", vk.ToString(), "/v/mykey") } @@ -49,7 +49,7 @@ func TestBaseCRDTvalueKey(t *testing.T) { func TestBaseCRDTprioryKey(t *testing.T) { base := exampleBaseCRDT() - pk := base.key.WithDocKey("mykey").WithPriorityFlag() + pk := base.key.WithDocID("mykey").WithPriorityFlag() if pk.ToString() != "/p/mykey" { t.Errorf("Incorrect priorityKey. Have %v, want %v", pk.ToString(), "/p/mykey") } @@ -58,13 +58,13 @@ func TestBaseCRDTprioryKey(t *testing.T) { func TestBaseCRDTSetGetPriority(t *testing.T) { base := exampleBaseCRDT() ctx := context.Background() - err := base.setPriority(ctx, base.key.WithDocKey("mykey"), 10) + err := base.setPriority(ctx, base.key.WithDocID("mykey"), 10) if err != nil { t.Errorf("baseCRDT failed to set Priority. err: %v", err) return } - priority, err := base.getPriority(ctx, base.key.WithDocKey("mykey")) + priority, err := base.getPriority(ctx, base.key.WithDocID("mykey")) if err != nil { t.Errorf("baseCRDT failed to get priority. err: %v", err) return diff --git a/core/crdt/composite.go b/core/crdt/composite.go index 761cc07828..39c0a3efe4 100644 --- a/core/crdt/composite.go +++ b/core/crdt/composite.go @@ -37,7 +37,7 @@ type CompositeDAGDelta struct { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte SubDAGs []core.DAGLink // Status represents the status of the document. By default it is `Active`. // Alternatively, if can be set to `Deleted`. 
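The `WithDocID` renames in the base CRDT tests above leave the serialized key layout untouched. A small self-contained sketch of the expected key shapes, reusing the exported helpers those tests call (the "mykey" ID is illustrative, not a real DocID):

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/core"
)

func main() {
	var key core.DataStoreKey

	// The value/priority instance flags still prefix the DocID component,
	// so only the field name changes, not the encoded key.
	fmt.Println(key.WithDocID("mykey").WithValueFlag().ToString())    // /v/mykey
	fmt.Println(key.WithDocID("mykey").WithPriorityFlag().ToString()) // /p/mykey
}
```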
@@ -67,10 +67,10 @@ func (delta *CompositeDAGDelta) Marshal() ([]byte, error) { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte Status uint8 FieldName string - }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocKey, delta.Status.UInt8(), delta.FieldName}) + }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocID, delta.Status.UInt8(), delta.FieldName}) if err != nil { return nil, err } @@ -116,7 +116,7 @@ func (c CompositeDAG) Set(patch []byte, links []core.DAGLink) *CompositeDAGDelta }) return &CompositeDAGDelta{ Data: patch, - DocKey: []byte(c.key.DocKey), + DocID: []byte(c.key.DocID), SubDAGs: links, SchemaVersionID: c.schemaVersionKey.SchemaVersionId, FieldName: c.fieldName, diff --git a/core/crdt/lwwreg.go b/core/crdt/lwwreg.go index 18979c1bfb..c256c35cea 100644 --- a/core/crdt/lwwreg.go +++ b/core/crdt/lwwreg.go @@ -32,7 +32,7 @@ type LWWRegDelta struct { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte FieldName string } @@ -58,9 +58,9 @@ func (delta *LWWRegDelta) Marshal() ([]byte, error) { SchemaVersionID string Priority uint64 Data []byte - DocKey []byte + DocID []byte FieldName string - }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocKey, delta.FieldName}) + }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocID, delta.FieldName}) if err != nil { return nil, err } @@ -105,7 +105,7 @@ func (reg LWWRegister) Value(ctx context.Context) ([]byte, error) { func (reg LWWRegister) Set(value []byte) *LWWRegDelta { return &LWWRegDelta{ Data: value, - DocKey: []byte(reg.key.DocKey), + DocID: []byte(reg.key.DocID), FieldName: reg.fieldName, SchemaVersionID: reg.schemaVersionKey.SchemaVersionId, } diff --git a/core/crdt/lwwreg_test.go b/core/crdt/lwwreg_test.go index 2b978feb2d..5e6e1b27a4 100644 --- a/core/crdt/lwwreg_test.go +++ b/core/crdt/lwwreg_test.go @@ -32,7 +32,7 @@ func newMockStore() datastore.DSReaderWriter { func setupLWWRegister() LWWRegister { store := newMockStore() - key := core.DataStoreKey{DocKey: "AAAA-BBBB"} + key := core.DataStoreKey{DocID: "AAAA-BBBB"} return NewLWWRegister(store, core.CollectionSchemaVersionKey{}, key, "") } diff --git a/core/doc.go b/core/doc.go index 8f6700f50c..2a149dccc5 100644 --- a/core/doc.go +++ b/core/doc.go @@ -20,8 +20,8 @@ import ( "github.com/sourcenetwork/defradb/client/request" ) -// DocKeyFieldIndex is the index of the key field in a document. -const DocKeyFieldIndex int = 0 +// DocIDFieldIndex is the index of the DocID field in a document. +const DocIDFieldIndex int = 0 // DocFields is a slice of fields in a document. type DocFields []any @@ -39,19 +39,19 @@ type Doc struct { SchemaVersionID string } -// GetKey returns the DocKey for this document. +// GetID returns the DocID for this document. // // Will panic if the document is empty. -func (d *Doc) GetKey() string { - key, _ := d.Fields[DocKeyFieldIndex].(string) - return key +func (d *Doc) GetID() string { + docID, _ := d.Fields[DocIDFieldIndex].(string) + return docID } -// SetKey sets the DocKey for this document. +// SetID sets the DocID for this document. // // Will panic if the document has not been initialised with fields. -func (d *Doc) SetKey(key string) { - d.Fields[DocKeyFieldIndex] = key +func (d *Doc) SetID(docID string) { + d.Fields[DocIDFieldIndex] = docID } // Clone returns a deep copy of this document. 
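The `GetID`/`SetID` pair above pins the document identifier to index `DocIDFieldIndex` (0) of `Doc.Fields`. A minimal sketch of that contract, using a placeholder ID string rather than a real DocID:

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/defradb/core"
)

func main() {
	// Slot 0 is reserved for the DocID; the remaining fields follow.
	doc := core.Doc{Fields: core.DocFields{"", "Alice", int64(30)}}

	doc.SetID("example-doc-id") // placeholder value, for illustration only
	fmt.Println(doc.GetID())    // prints: example-doc-id
}
```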
diff --git a/core/key.go b/core/key.go index 0a2529338a..0c038b11dd 100644 --- a/core/key.go +++ b/core/key.go @@ -67,7 +67,7 @@ type Key interface { type DataStoreKey struct { CollectionID string InstanceType InstanceType - DocKey string + DocID string FieldId string } @@ -87,13 +87,13 @@ var _ Key = (*IndexDataStoreKey)(nil) type PrimaryDataStoreKey struct { CollectionId string - DocKey string + DocID string } var _ Key = (*PrimaryDataStoreKey)(nil) type HeadStoreKey struct { - DocKey string + DocID string FieldId string //can be 'C' Cid cid.Cid } @@ -192,7 +192,7 @@ var _ Key = (*ReplicatorKey)(nil) // splitting the input using '/' as a field deliminator. It assumes // that the input string is in the following format: // -// /[CollectionId]/[InstanceType]/[DocKey]/[FieldId] +// /[CollectionId]/[InstanceType]/[DocID]/[FieldId] // // Any properties before the above (assuming a '/' deliminator) are ignored func NewDataStoreKey(key string) (DataStoreKey, error) { @@ -212,7 +212,7 @@ func NewDataStoreKey(key string) (DataStoreKey, error) { dataStoreKey.CollectionID = elements[0] dataStoreKey.InstanceType = InstanceType(elements[1]) - dataStoreKey.DocKey = elements[2] + dataStoreKey.DocID = elements[2] if numberOfElements == 4 { dataStoreKey.FieldId = elements[3] } @@ -228,9 +228,9 @@ func MustNewDataStoreKey(key string) DataStoreKey { return dsKey } -func DataStoreKeyFromDocKey(dockey client.DocKey) DataStoreKey { +func DataStoreKeyFromDocID(docID client.DocID) DataStoreKey { return DataStoreKey{ - DocKey: dockey.String(), + DocID: docID.String(), } } @@ -238,7 +238,7 @@ func DataStoreKeyFromDocKey(dockey client.DocKey) DataStoreKey { // splitting the input using '/' as a field deliminator. It assumes // that the input string is in the following format: // -// /[DocKey]/[FieldId]/[Cid] +// /[DocID]/[FieldId]/[Cid] // // Any properties before the above are ignored func NewHeadStoreKey(key string) (HeadStoreKey, error) { @@ -254,7 +254,7 @@ func NewHeadStoreKey(key string) (HeadStoreKey, error) { return HeadStoreKey{ // elements[0] is empty (key has leading '/') - DocKey: elements[1], + DocID: elements[1], FieldId: elements[2], Cid: cid, }, nil @@ -390,15 +390,15 @@ func (k DataStoreKey) WithDeletedFlag() DataStoreKey { return newKey } -func (k DataStoreKey) WithDocKey(docKey string) DataStoreKey { +func (k DataStoreKey) WithDocID(docID string) DataStoreKey { newKey := k - newKey.DocKey = docKey + newKey.DocID = docID return newKey } func (k DataStoreKey) WithInstanceInfo(key DataStoreKey) DataStoreKey { newKey := k - newKey.DocKey = key.DocKey + newKey.DocID = key.DocID newKey.FieldId = key.FieldId newKey.InstanceType = key.InstanceType return newKey @@ -412,14 +412,14 @@ func (k DataStoreKey) WithFieldId(fieldId string) DataStoreKey { func (k DataStoreKey) ToHeadStoreKey() HeadStoreKey { return HeadStoreKey{ - DocKey: k.DocKey, + DocID: k.DocID, FieldId: k.FieldId, } } -func (k HeadStoreKey) WithDocKey(docKey string) HeadStoreKey { +func (k HeadStoreKey) WithDocID(docID string) HeadStoreKey { newKey := k - newKey.DocKey = docKey + newKey.DocID = docID return newKey } @@ -444,8 +444,8 @@ func (k DataStoreKey) ToString() string { if k.InstanceType != "" { result = result + "/" + string(k.InstanceType) } - if k.DocKey != "" { - result = result + "/" + k.DocKey + if k.DocID != "" { + result = result + "/" + k.DocID } if k.FieldId != "" { result = result + "/" + k.FieldId @@ -464,7 +464,7 @@ func (k DataStoreKey) ToDS() ds.Key { func (k DataStoreKey) Equal(other DataStoreKey) bool { return 
k.CollectionID == other.CollectionID && - k.DocKey == other.DocKey && + k.DocID == other.DocID && k.FieldId == other.FieldId && k.InstanceType == other.InstanceType } @@ -472,7 +472,7 @@ func (k DataStoreKey) Equal(other DataStoreKey) bool { func (k DataStoreKey) ToPrimaryDataStoreKey() PrimaryDataStoreKey { return PrimaryDataStoreKey{ CollectionId: k.CollectionID, - DocKey: k.DocKey, + DocID: k.DocID, } } @@ -582,7 +582,7 @@ func (k IndexDataStoreKey) Equal(other IndexDataStoreKey) bool { func (k PrimaryDataStoreKey) ToDataStoreKey() DataStoreKey { return DataStoreKey{ CollectionID: k.CollectionId, - DocKey: k.DocKey, + DocID: k.DocID, } } @@ -601,8 +601,8 @@ func (k PrimaryDataStoreKey) ToString() string { result = result + "/" + k.CollectionId } result = result + PRIMARY_KEY - if k.DocKey != "" { - result = result + "/" + k.DocKey + if k.DocID != "" { + result = result + "/" + k.DocID } return result @@ -786,8 +786,8 @@ func (k ReplicatorKey) ToDS() ds.Key { func (k HeadStoreKey) ToString() string { var result string - if k.DocKey != "" { - result = result + "/" + k.DocKey + if k.DocID != "" { + result = result + "/" + k.DocID } if k.FieldId != "" { result = result + "/" + k.FieldId @@ -817,8 +817,8 @@ func (k DataStoreKey) PrefixEnd() DataStoreKey { newKey.FieldId = string(bytesPrefixEnd([]byte(k.FieldId))) return newKey } - if k.DocKey != "" { - newKey.DocKey = string(bytesPrefixEnd([]byte(k.DocKey))) + if k.DocID != "" { + newKey.DocID = string(bytesPrefixEnd([]byte(k.DocID))) return newKey } if k.InstanceType != "" { diff --git a/core/key_test.go b/core/key_test.go index d22498bd8c..4984c5b14f 100644 --- a/core/key_test.go +++ b/core/key_test.go @@ -29,14 +29,14 @@ func TestNewDataStoreKey_ReturnsEmptyStruct_GivenEmptyString(t *testing.T) { assert.ErrorIs(t, ErrEmptyKey, err) } -func TestNewDataStoreKey_ReturnsCollectionIdAndIndexIdAndDocKeyAndFieldIdAndInstanceType_GivenFourItemsWithType( +func TestNewDataStoreKey_ReturnsCollectionIdAndIndexIdAndDocIDAndFieldIdAndInstanceType_GivenFourItemsWithType( t *testing.T, ) { instanceType := "anyType" fieldId := "f1" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := collectionId + "/" + instanceType + "/" + docKey + "/" + fieldId + inputString := collectionId + "/" + instanceType + "/" + docID + "/" + fieldId result, err := NewDataStoreKey(inputString) if err != nil { @@ -48,11 +48,11 @@ func TestNewDataStoreKey_ReturnsCollectionIdAndIndexIdAndDocKeyAndFieldIdAndInst t, DataStoreKey{ CollectionID: collectionId, - DocKey: docKey, + DocID: docID, FieldId: fieldId, InstanceType: InstanceType(instanceType)}, result) - assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docKey+"/"+fieldId, resultString) + assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docID+"/"+fieldId, resultString) } func TestNewDataStoreKey_ReturnsEmptyStruct_GivenAStringWithMissingElements(t *testing.T) { @@ -65,9 +65,9 @@ func TestNewDataStoreKey_ReturnsEmptyStruct_GivenAStringWithMissingElements(t *t func TestNewDataStoreKey_GivenAShortObjectMarker(t *testing.T) { instanceType := "anyType" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := collectionId + "/" + instanceType + "/" + docKey + inputString := collectionId + "/" + instanceType + "/" + docID result, err := NewDataStoreKey(inputString) if err != nil { @@ -79,18 +79,18 @@ func TestNewDataStoreKey_GivenAShortObjectMarker(t *testing.T) { t, DataStoreKey{ CollectionID: collectionId, - DocKey: docKey, + DocID: docID, InstanceType: 
InstanceType(instanceType)}, result) - assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docKey, resultString) + assert.Equal(t, "/"+collectionId+"/"+instanceType+"/"+docID, resultString) } func TestNewDataStoreKey_GivenAStringWithExtraPrefixes(t *testing.T) { instanceType := "anyType" fieldId := "f1" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := "/db/my_database_name/data/" + collectionId + "/" + instanceType + "/" + docKey + "/" + fieldId + inputString := "/db/my_database_name/data/" + collectionId + "/" + instanceType + "/" + docID + "/" + fieldId _, err := NewDataStoreKey(inputString) @@ -100,9 +100,9 @@ func TestNewDataStoreKey_GivenAStringWithExtraPrefixes(t *testing.T) { func TestNewDataStoreKey_GivenAStringWithExtraSuffix(t *testing.T) { instanceType := "anyType" fieldId := "f1" - docKey := "docKey" + docID := "docID" collectionId := "1" - inputString := "/db/data/" + collectionId + "/" + instanceType + "/" + docKey + "/" + fieldId + "/version_number" + inputString := "/db/data/" + collectionId + "/" + instanceType + "/" + docID + "/" + fieldId + "/version_number" _, err := NewDataStoreKey(inputString) diff --git a/db/backup.go b/db/backup.go index e2958d1c96..cc8cd01fff 100644 --- a/db/backup.go +++ b/db/backup.go @@ -74,7 +74,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin for _, field := range col.Schema().Fields { if field.Kind == client.FieldKind_FOREIGN_OBJECT { if val, ok := docMap[field.Name+request.RelatedObjectID]; ok { - if docMap["_newKey"] == val { + if docMap[request.NewDocIDFieldName] == val { resetMap[field.Name+request.RelatedObjectID] = val delete(docMap, field.Name+request.RelatedObjectID) } @@ -82,8 +82,8 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin } } - delete(docMap, "_key") - delete(docMap, "_newKey") + delete(docMap, request.DocIDFieldName) + delete(docMap, request.NewDocIDFieldName) doc, err := client.NewDocFromMap(docMap) if err != nil { @@ -189,13 +189,13 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } colTxn := col.WithTxn(txn) - keysCh, err := colTxn.GetAllDocKeys(ctx) + docIDsCh, err := colTxn.GetAllDocIDs(ctx) if err != nil { return err } firstDoc := true - for key := range keysCh { + for docResultWithID := range docIDsCh { if firstDoc { firstDoc = false } else { @@ -205,7 +205,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } } - doc, err := colTxn.Get(ctx, key.Key, false) + doc, err := colTxn.Get(ctx, docResultWithID.ID, false) if err != nil { return err } @@ -225,7 +225,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return err } - if foreignKey.(string) == doc.Key().String() { + if foreignKey.(string) == doc.ID().String() { isSelfReference = true refFieldName = field.Name + request.RelatedObjectID } @@ -234,11 +234,11 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return NewErrFailedToGetCollection(field.Schema, err) } - foreignDocKey, err := client.NewDocKeyFromString(foreignKey.(string)) + foreignDocID, err := client.NewDocIDFromString(foreignKey.(string)) if err != nil { return err } - foreignDoc, err := foreignCol.Get(ctx, foreignDocKey, false) + foreignDoc, err := foreignCol.Get(ctx, foreignDocID, false) if err != nil { err := doc.Set(field.Name+request.RelatedObjectID, nil) if err != nil { @@ -250,12 +250,12 @@ func (db *db) 
basicExport(ctx context.Context, txn datastore.Txn, config *client return err } - delete(oldForeignDoc, "_key") - if foreignDoc.Key().String() == foreignDocKey.String() { + delete(oldForeignDoc, request.DocIDFieldName) + if foreignDoc.ID().String() == foreignDocID.String() { delete(oldForeignDoc, field.Name+request.RelatedObjectID) } - if foreignDoc.Key().String() == doc.Key().String() { + if foreignDoc.ID().String() == doc.ID().String() { isSelfReference = true refFieldName = field.Name + request.RelatedObjectID } @@ -265,15 +265,15 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } - if foreignDoc.Key().String() != doc.Key().String() { - err = doc.Set(field.Name+request.RelatedObjectID, newForeignDoc.Key().String()) + if foreignDoc.ID().String() != doc.ID().String() { + err = doc.Set(field.Name+request.RelatedObjectID, newForeignDoc.ID().String()) if err != nil { return err } } - if newForeignDoc.Key().String() != foreignDoc.Key().String() { - keyChangeCache[foreignDoc.Key().String()] = newForeignDoc.Key().String() + if newForeignDoc.ID().String() != foreignDoc.ID().String() { + keyChangeCache[foreignDoc.ID().String()] = newForeignDoc.ID().String() } } } @@ -286,7 +286,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client return err } - delete(docM, "_key") + delete(docM, request.DocIDFieldName) if isSelfReference { delete(docM, refFieldName) } @@ -295,17 +295,17 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client if err != nil { return err } - // newKey is needed to let the user know what will be the key of the imported document. - docM["_newKey"] = newDoc.Key().String() - // NewDocFromMap removes the "_key" map item so we add it back. - docM["_key"] = doc.Key().String() + // a new docID is needed to let the user know what will be the docID of the imported document. + docM[request.NewDocIDFieldName] = newDoc.ID().String() + // NewDocFromMap removes the "_docID" map item so we add it back. 
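
For context on the two ID fields written here: a document's ID appears to be derived from its content (the multi-collection update test below shows a changed `_docIDNew` after an update), so a document whose content changed since creation will be re-created under a different ID on import. The export therefore records the current ID under `_docID` and the future ID under `_docIDNew`, mirroring the `keyChangeCache` the exporter maintains internally. A minimal sketch of how a consumer of a backup file could recover that remapping; `collectIDChanges` is a hypothetical helper, not part of this patch:

```go
// collectIDChanges pairs each exported document's current "_docID" with the
// "_docIDNew" it will receive on import, skipping unchanged documents.
func collectIDChanges(docs []map[string]any) map[string]string {
	changes := make(map[string]string)
	for _, doc := range docs {
		oldID, _ := doc["_docID"].(string)
		newID, _ := doc["_docIDNew"].(string)
		if oldID != "" && newID != "" && oldID != newID {
			changes[oldID] = newID
		}
	}
	return changes
}
```
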
+ docM[request.DocIDFieldName] = doc.ID().String() if isSelfReference { - docM[refFieldName] = newDoc.Key().String() + docM[refFieldName] = newDoc.ID().String() } - if newDoc.Key().String() != doc.Key().String() { - keyChangeCache[doc.Key().String()] = newDoc.Key().String() + if newDoc.ID().String() != doc.ID().String() { + keyChangeCache[doc.ID().String()] = newDoc.ID().String() } var b []byte diff --git a/db/backup_test.go b/db/backup_test.go index f0e7a6e338..cbe1aed58d 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -77,7 +77,7 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -139,7 +139,7 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -201,7 +201,7 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -275,7 +275,7 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { 
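
The import error tests further below also pin down the accepted backup shape: a single JSON object keyed by collection name, with each value an array of documents; a top-level JSON array, or an object where the array should be, is rejected. A sketch of producing such a file with only the standard library, reusing `t`, the `filepath` variable, and fixture values from these tests:

```go
// Build a backup in the accepted shape: collection name -> array of documents.
backup := map[string][]map[string]any{
	"User": {{
		"_docID":    "bae-e933420a-988a-56f8-8952-6c245aebd519",
		"_docIDNew": "bae-e933420a-988a-56f8-8952-6c245aebd519",
		"age":       30,
		"name":      "John",
	}},
}
b, err := json.Marshal(backup) // requires encoding/json
require.NoError(t, err)
require.NoError(t, os.WriteFile(filepath, b, 0664))
```
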
require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Book":[{"_key":"bae-4399f189-138d-5d49-9e25-82e78463677b","_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}],"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`) + data := []byte(`{"Book":[{"_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b","_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}],"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -330,7 +330,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), + []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), 0664, ) require.NoError(t, err) @@ -345,7 +345,7 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { require.NoError(t, err) expectedMap := map[string]any{} - data := []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) + data := []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}]}`) err = json.Unmarshal(data, &expectedMap) require.NoError(t, err) require.EqualValues(t, expectedMap, fileMap) @@ -375,7 +375,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple 
St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), + []byte(`{"Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`), 0664, ) require.NoError(t, err) @@ -391,7 +391,7 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { col1, err := db.getCollectionByName(ctx, txn, "Address") require.NoError(t, err) - key1, err := client.NewDocKeyFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") + key1, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f") require.NoError(t, err) _, err = col1.Get(ctx, key1, false) require.NoError(t, err) @@ -399,12 +399,12 @@ func TestBasicImport_WithMultipleCollectionsAndObjects_NoError(t *testing.T) { col2, err := db.getCollectionByName(ctx, txn, "User") require.NoError(t, err) - key2, err := client.NewDocKeyFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") + key2, err := client.NewDocIDFromString("bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df") require.NoError(t, err) _, err = col2.Get(ctx, key2, false) require.NoError(t, err) - key3, err := client.NewDocKeyFromString("bae-e933420a-988a-56f8-8952-6c245aebd519") + key3, err := client.NewDocIDFromString("bae-e933420a-988a-56f8-8952-6c245aebd519") require.NoError(t, err) _, err = col2.Get(ctx, key3, false) require.NoError(t, err) @@ -434,7 +434,7 @@ func TestBasicImport_WithJSONArray_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`["Address":[{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_key":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_newKey":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]]`), + []byte(`["Address":[{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}],"User":[{"_docID":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","_docIDNew":"bae-b94880d1-e6d2-542f-b9e0-5a369fafd0df","age":40,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]]`), 0664, ) require.NoError(t, err) @@ -469,7 +469,7 @@ func TestBasicImport_WithObjectCollection_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Address":{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), + []byte(`{"Address":{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), 0664, ) require.NoError(t, err) @@ -504,7 +504,7 @@ func TestBasicImport_WithInvalidFilepath_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - 
[]byte(`{"Address":{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), + []byte(`{"Address":{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), 0664, ) require.NoError(t, err) @@ -540,7 +540,7 @@ func TestBasicImport_WithInvalidCollection_ReturnError(t *testing.T) { err = os.WriteFile( filepath, - []byte(`{"Addresses":{"_key":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_newKey":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), + []byte(`{"Addresses":{"_docID":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","_docIDNew":"bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f","city":"Toronto","street":"101 Maple St"}}`), 0664, ) require.NoError(t, err) diff --git a/db/base/collection_keys.go b/db/base/collection_keys.go index 6a762ff180..f32da872fe 100644 --- a/db/base/collection_keys.go +++ b/db/base/collection_keys.go @@ -17,18 +17,21 @@ import ( "github.com/sourcenetwork/defradb/core" ) -// MakeIndexPrefix generates a key prefix for the given collection/index descriptions -func MakeCollectionKey(col client.CollectionDescription) core.DataStoreKey { +// MakeDataStoreKeyWithCollectionDescription returns the datastore key for the given collection description. +func MakeDataStoreKeyWithCollectionDescription(col client.CollectionDescription) core.DataStoreKey { return core.DataStoreKey{ CollectionID: col.IDString(), } } -// MakeIndexKey generates a key for the target dockey, using the collection/index description -func MakeDocKey(col client.CollectionDescription, docKey string) core.DataStoreKey { +// MakeDataStoreKeyWithCollectionAndDocID returns the datastore key for the given docID and collection description. 
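
Taken together with the key tests earlier in this patch, these helpers fix the datastore key layout as `/<collectionID>/<instanceType>/<docID>[/<fieldID>]`. A standalone sketch of that layout, using a local struct rather than `core.DataStoreKey` so it compiles on its own:

```go
// dsKey mirrors the components of core.DataStoreKey for illustration only.
type dsKey struct {
	CollectionID string
	InstanceType string
	DocID        string
	FieldID      string
}

// String reproduces the ordering asserted by the key tests: collection ID,
// then instance type, then docID, then the optional field ID.
func (k dsKey) String() string {
	s := "/" + k.CollectionID + "/" + k.InstanceType + "/" + k.DocID
	if k.FieldID != "" {
		s += "/" + k.FieldID
	}
	return s
}
```
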
+func MakeDataStoreKeyWithCollectionAndDocID( + col client.CollectionDescription, + docID string, +) core.DataStoreKey { return core.DataStoreKey{ CollectionID: col.IDString(), - DocKey: docKey, + DocID: docID, } } @@ -41,14 +44,14 @@ func MakePrimaryIndexKeyForCRDT( ) (core.DataStoreKey, error) { switch ctype { case client.COMPOSITE: - return MakeCollectionKey(c).WithInstanceInfo(key).WithFieldId(core.COMPOSITE_NAMESPACE), nil + return MakeDataStoreKeyWithCollectionDescription(c).WithInstanceInfo(key).WithFieldId(core.COMPOSITE_NAMESPACE), nil case client.LWW_REGISTER: field, ok := c.GetFieldByName(fieldName, &schema) if !ok { return core.DataStoreKey{}, client.NewErrFieldNotExist(fieldName) } - return MakeCollectionKey(c).WithInstanceInfo(key).WithFieldId(fmt.Sprint(field.ID)), nil + return MakeDataStoreKeyWithCollectionDescription(c).WithInstanceInfo(key).WithFieldId(fmt.Sprint(field.ID)), nil } return core.DataStoreKey{}, ErrInvalidCrdtType } diff --git a/db/collection.go b/db/collection.go index 65b0fbaa22..f5b1cd3b27 100644 --- a/db/collection.go +++ b/db/collection.go @@ -173,7 +173,7 @@ func (db *db) updateSchema( if _, ok := schema.GetField(idFieldName); !ok { schema.Fields = append(schema.Fields, client.FieldDescription{ Name: idFieldName, - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, RelationType: client.Relation_Type_INTERNAL_ID, RelationName: field.RelationName, }) @@ -285,7 +285,7 @@ func validateUpdateSchemaFields( var existingField client.FieldDescription var fieldAlreadyExists bool if proposedField.ID != client.FieldID(0) || - proposedField.Name == request.KeyFieldName { + proposedField.Name == request.DocIDFieldName { existingField, fieldAlreadyExists = existingFieldsByID[proposedField.ID] } @@ -351,8 +351,8 @@ func validateUpdateSchemaFields( idFieldName := proposedField.Name + request.RelatedObjectID idField, idFieldFound := proposedDesc.GetField(idFieldName) if idFieldFound { - if idField.Kind != client.FieldKind_DocKey { - return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocKey, idField.Kind) + if idField.Kind != client.FieldKind_DocID { + return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocID, idField.Kind) } if idField.RelationType != client.Relation_Type_INTERNAL_ID { @@ -630,23 +630,23 @@ func (db *db) getAllCollections(ctx context.Context, txn datastore.Txn) ([]clien return collections, nil } -// GetAllDocKeys returns all the document keys that exist in the collection. +// GetAllDocIDs returns all the document IDs that exist in the collection. 
// // @todo: We probably need a lock on the collection for this kind of op since // it hits every key and will cause Tx conflicts for concurrent Txs -func (c *collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (c *collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { txn, err := c.getTxn(ctx, true) if err != nil { return nil, err } - return c.getAllDocKeysChan(ctx, txn) + return c.getAllDocIDsChan(ctx, txn) } -func (c *collection) getAllDocKeysChan( +func (c *collection) getAllDocIDsChan( ctx context.Context, txn datastore.Txn, -) (<-chan client.DocKeysResult, error) { +) (<-chan client.DocIDResult, error) { prefix := core.PrimaryDataStoreKey{ // empty path for all keys prefix CollectionId: fmt.Sprint(c.ID()), } @@ -658,11 +658,11 @@ func (c *collection) getAllDocKeysChan( return nil, err } - resCh := make(chan client.DocKeysResult) + resCh := make(chan client.DocIDResult) go func() { defer func() { if err := q.Close(); err != nil { - log.ErrorE(ctx, "Failed to close AllDocKeys query", err) + log.ErrorE(ctx, errFailedtoCloseQueryReqAllIDs, err) } close(resCh) c.discardImplicitTxn(ctx, txn) @@ -677,23 +677,22 @@ func (c *collection) getAllDocKeysChan( // noop, just continue on the with the for loop } if res.Error != nil { - resCh <- client.DocKeysResult{ + resCh <- client.DocIDResult{ Err: res.Error, } return } - // now we have a doc key - rawDocKey := ds.NewKey(res.Key).BaseNamespace() - key, err := client.NewDocKeyFromString(rawDocKey) + rawDocID := ds.NewKey(res.Key).BaseNamespace() + docID, err := client.NewDocIDFromString(rawDocID) if err != nil { - resCh <- client.DocKeysResult{ + resCh <- client.DocIDResult{ Err: res.Error, } return } - resCh <- client.DocKeysResult{ - Key: key, + resCh <- client.DocIDResult{ + ID: docID, } } }() @@ -742,7 +741,7 @@ func (c *collection) WithTxn(txn datastore.Txn) client.Collection { } // Create a new document. -// Will verify the DocKey/CID to ensure that the new document is correctly formatted. +// Will verify the DocID/CID to ensure that the new document is correctly formatted. func (c *collection) Create(ctx context.Context, doc *client.Document) error { txn, err := c.getTxn(ctx, false) if err != nil { @@ -758,7 +757,7 @@ func (c *collection) Create(ctx context.Context, doc *client.Document) error { } // CreateMany creates a collection of documents at once. -// Will verify the DocKey/CID to ensure that the new documents are correctly formatted. +// Will verify the DocID/CID to ensure that the new documents are correctly formatted. 
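
Callers of the renamed API receive `client.DocIDResult` values over a channel and must check the per-result error, as `basicExport` does above. A minimal consumption sketch, assuming `col` is a `client.Collection` and `ctx` a `context.Context`:

```go
docIDsCh, err := col.GetAllDocIDs(ctx)
if err != nil {
	return err
}
for res := range docIDsCh {
	// Each result carries either a docID or an error; the channel is
	// closed once iteration stops.
	if res.Err != nil {
		return res.Err
	}
	fmt.Println(res.ID.String())
}
```
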
func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) error { txn, err := c.getTxn(ctx, false) if err != nil { @@ -775,29 +774,29 @@ func (c *collection) CreateMany(ctx context.Context, docs []*client.Document) er return c.commitImplicitTxn(ctx, txn) } -func (c *collection) getKeysFromDoc( +func (c *collection) getDocIDAndPrimaryKeyFromDoc( doc *client.Document, -) (client.DocKey, core.PrimaryDataStoreKey, error) { - docKey, err := doc.GenerateDocKey() +) (client.DocID, core.PrimaryDataStoreKey, error) { + docID, err := doc.GenerateDocID() if err != nil { - return client.DocKey{}, core.PrimaryDataStoreKey{}, err + return client.DocID{}, core.PrimaryDataStoreKey{}, err } - primaryKey := c.getPrimaryKeyFromDocKey(docKey) - if primaryKey.DocKey != doc.Key().String() { - return client.DocKey{}, core.PrimaryDataStoreKey{}, - NewErrDocVerification(doc.Key().String(), primaryKey.DocKey) + primaryKey := c.getPrimaryKeyFromDocID(docID) + if primaryKey.DocID != doc.ID().String() { + return client.DocID{}, core.PrimaryDataStoreKey{}, + NewErrDocVerification(doc.ID().String(), primaryKey.DocID) } - return docKey, primaryKey, nil + return docID, primaryKey, nil } func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client.Document) error { - // This has to be done before dockey verification happens in the next step. - if err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields); err != nil { + // This has to be done before docID verification happens in the next step. + if err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields); err != nil { return err } - dockey, primaryKey, err := c.getKeysFromDoc(doc) + docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) if err != nil { return err } @@ -808,15 +807,15 @@ func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client. return err } if exists { - return NewErrDocumentAlreadyExists(primaryKey.DocKey) + return NewErrDocumentAlreadyExists(primaryKey.DocID) } if isDeleted { - return NewErrDocumentDeleted(primaryKey.DocKey) + return NewErrDocumentDeleted(primaryKey.DocID) } // write value object marker if we have an empty doc if len(doc.Values()) == 0 { - valueKey := c.getDSKeyFromDockey(dockey) + valueKey := c.getDataStoreKeyFromDocID(docID) err = txn.Datastore().Put(ctx, valueKey.ToDS(), []byte{base.ObjectMarker}) if err != nil { return err @@ -842,7 +841,7 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { } defer c.discardImplicitTxn(ctx, txn) - primaryKey := c.getPrimaryKeyFromDocKey(doc.Key()) + primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return err @@ -851,7 +850,7 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { return client.ErrDocumentNotFound } if isDeleted { - return NewErrDocumentDeleted(primaryKey.DocKey) + return NewErrDocumentDeleted(primaryKey.DocID) } err = c.update(ctx, txn, doc) @@ -862,7 +861,7 @@ func (c *collection) Update(ctx context.Context, doc *client.Document) error { return c.commitImplicitTxn(ctx, txn) } -// Contract: DB Exists check is already performed, and a doc with the given key exists. +// Contract: DB Exists check is already performed, and a doc with the given ID exists. // Note: Should we CompareAndSet the update, IE: Query(read-only) the state, and update if changed // or, just update everything regardless. 
// Should probably be smart about the update due to the MerkleCRDT overhead, shouldn't @@ -884,15 +883,15 @@ func (c *collection) Save(ctx context.Context, doc *client.Document) error { } defer c.discardImplicitTxn(ctx, txn) - // Check if document already exists with key - primaryKey := c.getPrimaryKeyFromDocKey(doc.Key()) + // Check if document already exists with primary DS key. + primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return err } if isDeleted { - return NewErrDocumentDeleted(doc.Key().String()) + return NewErrDocumentDeleted(doc.ID().String()) } if exists { @@ -933,7 +932,7 @@ func (c *collection) save( // Loop through doc values // => instantiate MerkleCRDT objects // => Set/Publish new CRDT values - primaryKey := c.getPrimaryKeyFromDocKey(doc.Key()) + primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) links := make([]core.DAGLink, 0) docProperties := make(map[string]any) for k, v := range doc.Fields() { @@ -958,7 +957,7 @@ func (c *collection) save( if isSecondaryRelationID { primaryId := val.Value().(string) - err = c.patchPrimaryDoc(ctx, txn, c.Name(), relationFieldDescription, primaryKey.DocKey, primaryId) + err = c.patchPrimaryDoc(ctx, txn, c.Name(), relationFieldDescription, primaryKey.DocID, primaryId) if err != nil { return cid.Undef, err } @@ -968,7 +967,7 @@ func (c *collection) save( continue } - err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.Key().String(), fieldDescription, val.Value()) + err = c.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, doc.ID().String(), fieldDescription, val.Value()) if err != nil { return cid.Undef, err } @@ -1017,7 +1016,7 @@ func (c *collection) save( func() { c.db.events.Updates.Value().Publish( events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: headNode.Cid(), SchemaRoot: c.Schema().Root, Block: headNode, @@ -1038,7 +1037,7 @@ func (c *collection) save( func (c *collection) validateOneToOneLinkDoesntAlreadyExist( ctx context.Context, txn datastore.Txn, - docKey string, + docID string, fieldDescription client.FieldDescription, value any, ) error { @@ -1060,8 +1059,8 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( filter := fmt.Sprintf( `{_and: [{%s: {_ne: "%s"}}, {%s: {_eq: "%s"}}]}`, - request.KeyFieldName, - docKey, + request.DocIDFieldName, + docID, fieldDescription.Name, value, ) @@ -1102,7 +1101,7 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( if err != nil { return err } - return NewErrOneOneAlreadyLinked(docKey, existingDocument.GetKey(), objFieldDescription.RelationName) + return NewErrOneOneAlreadyLinked(docID, existingDocument.GetID(), objFieldDescription.RelationName) } err = selectionPlan.Close() @@ -1113,18 +1112,18 @@ func (c *collection) validateOneToOneLinkDoesntAlreadyExist( return nil } -// Delete will attempt to delete a document by key will return true if a deletion is successful, -// and return false, along with an error, if it cannot. +// Delete will attempt to delete a document by docID and return true if a deletion is successful, +// otherwise will return false, along with an error, if it cannot. // If the document doesn't exist, then it will return false, and a ErrDocumentNotFound error. -// This operation will all state relating to the given DocKey. This includes data, block, and head storage. -func (c *collection) Delete(ctx context.Context, key client.DocKey) (bool, error) { +// This operation will delete all state relating to the given DocID. 
This includes data, block, and head storage. +func (c *collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { txn, err := c.getTxn(ctx, false) if err != nil { return false, err } defer c.discardImplicitTxn(ctx, txn) - primaryKey := c.getPrimaryKeyFromDocKey(key) + primaryKey := c.getPrimaryKeyFromDocID(docID) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return false, err @@ -1133,7 +1132,7 @@ func (c *collection) Delete(ctx context.Context, key client.DocKey) (bool, error return false, client.ErrDocumentNotFound } if isDeleted { - return false, NewErrDocumentDeleted(primaryKey.DocKey) + return false, NewErrDocumentDeleted(primaryKey.DocID) } err = c.applyDelete(ctx, txn, primaryKey) @@ -1143,15 +1142,15 @@ func (c *collection) Delete(ctx context.Context, key client.DocKey) (bool, error return true, c.commitImplicitTxn(ctx, txn) } -// Exists checks if a given document exists with supplied DocKey. -func (c *collection) Exists(ctx context.Context, key client.DocKey) (bool, error) { +// Exists checks if a given document exists with supplied DocID. +func (c *collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { txn, err := c.getTxn(ctx, false) if err != nil { return false, err } defer c.discardImplicitTxn(ctx, txn) - primaryKey := c.getPrimaryKeyFromDocKey(key) + primaryKey := c.getPrimaryKeyFromDocID(docID) exists, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil && !errors.Is(err, ds.ErrNotFound) { return false, err @@ -1159,13 +1158,13 @@ func (c *collection) Exists(ctx context.Context, key client.DocKey) (bool, error return exists && !isDeleted, c.commitImplicitTxn(ctx, txn) } -// check if a document exists with the given key +// check if a document exists with the given primary key func (c *collection) exists( ctx context.Context, txn datastore.Txn, - key core.PrimaryDataStoreKey, + primaryKey core.PrimaryDataStoreKey, ) (exists bool, isDeleted bool, err error) { - val, err := txn.Datastore().Get(ctx, key.ToDS()) + val, err := txn.Datastore().Get(ctx, primaryKey.ToDS()) if err != nil && errors.Is(err, ds.ErrNotFound) { return false, false, nil } else if err != nil { @@ -1181,7 +1180,7 @@ func (c *collection) exists( func (c *collection) saveFieldToMerkleCRDT( ctx context.Context, txn datastore.Txn, - key core.DataStoreKey, + dsKey core.DataStoreKey, val client.Value, ) (ipld.Node, uint64, error) { switch val.Type() { @@ -1201,7 +1200,7 @@ func (c *collection) saveFieldToMerkleCRDT( } } - fieldID, err := strconv.Atoi(key.FieldId) + fieldID, err := strconv.Atoi(dsKey.FieldId) if err != nil { return nil, 0, err } @@ -1216,7 +1215,7 @@ func (c *collection) saveFieldToMerkleCRDT( merkleCRDT := merklecrdt.NewMerkleLWWRegister( txn, core.NewCollectionSchemaVersionKey(schema.VersionID, c.ID()), - key, + dsKey, field.Name, ) @@ -1229,16 +1228,16 @@ func (c *collection) saveFieldToMerkleCRDT( func (c *collection) saveCompositeToMerkleCRDT( ctx context.Context, txn datastore.Txn, - key core.DataStoreKey, + dsKey core.DataStoreKey, buf []byte, links []core.DAGLink, status client.DocumentStatus, ) (ipld.Node, uint64, error) { - key = key.WithFieldId(core.COMPOSITE_NAMESPACE) + dsKey = dsKey.WithFieldId(core.COMPOSITE_NAMESPACE) merkleCRDT := merklecrdt.NewMerkleCompositeDAG( txn, core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), - key, + dsKey, "", ) @@ -1278,30 +1277,30 @@ func (c *collection) commitImplicitTxn(ctx context.Context, txn datastore.Txn) e return nil } -func (c *collection) 
getPrimaryKeyFromDocKey(docKey client.DocKey) core.PrimaryDataStoreKey { +func (c *collection) getPrimaryKeyFromDocID(docID client.DocID) core.PrimaryDataStoreKey { return core.PrimaryDataStoreKey{ CollectionId: fmt.Sprint(c.ID()), - DocKey: docKey.String(), + DocID: docID.String(), } } -func (c *collection) getDSKeyFromDockey(docKey client.DocKey) core.DataStoreKey { +func (c *collection) getDataStoreKeyFromDocID(docID client.DocID) core.DataStoreKey { return core.DataStoreKey{ CollectionID: fmt.Sprint(c.ID()), - DocKey: docKey.String(), + DocID: docID.String(), InstanceType: core.ValueKey, } } -func (c *collection) tryGetFieldKey(key core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) { +func (c *collection) tryGetFieldKey(primaryKey core.PrimaryDataStoreKey, fieldName string) (core.DataStoreKey, bool) { fieldId, hasField := c.tryGetSchemaFieldID(fieldName) if !hasField { return core.DataStoreKey{}, false } return core.DataStoreKey{ - CollectionID: key.CollectionId, - DocKey: key.DocKey, + CollectionID: primaryKey.CollectionId, + DocID: primaryKey.DocID, FieldId: strconv.FormatUint(uint64(fieldId), 10), }, true } diff --git a/db/collection_delete.go b/db/collection_delete.go index afa7d64a92..6dbf5dfa5e 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -24,11 +24,11 @@ import ( // DeleteWith deletes a target document. // -// Target can be a Filter statement, a single docKey, a single document, -// an array of docKeys, or an array of documents. +// Target can be a Filter statement, a single DocID, a single document, +// an array of DocIDs, or an array of documents. // // If you want more type safety, use the respective typed versions of Delete. -// Eg: DeleteWithFilter or DeleteWithKey +// Eg: DeleteWithFilter or DeleteWithDocID func (c *collection) DeleteWith( ctx context.Context, target any, @@ -36,19 +36,19 @@ func (c *collection) DeleteWith( switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) - case client.DocKey: - return c.DeleteWithKey(ctx, t) - case []client.DocKey: - return c.DeleteWithKeys(ctx, t) + case client.DocID: + return c.DeleteWithDocID(ctx, t) + case []client.DocID: + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } } -// DeleteWithKey deletes using a DocKey to target a single document for delete. -func (c *collection) DeleteWithKey( +// DeleteWithDocID deletes using a DocID to target a single document for delete. +func (c *collection) DeleteWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, ) (*client.DeleteResult, error) { txn, err := c.getTxn(ctx, false) if err != nil { @@ -57,7 +57,7 @@ func (c *collection) DeleteWithKey( defer c.discardImplicitTxn(ctx, txn) - dsKey := c.getPrimaryKeyFromDocKey(key) + dsKey := c.getPrimaryKeyFromDocID(docID) res, err := c.deleteWithKey(ctx, txn, dsKey, client.Deleted) if err != nil { return nil, err @@ -66,10 +66,10 @@ func (c *collection) DeleteWithKey( return res, c.commitImplicitTxn(ctx, txn) } -// DeleteWithKeys is the same as DeleteWithKey but accepts multiple keys as a slice. -func (c *collection) DeleteWithKeys( +// DeleteWithDocIDs is the same as DeleteWithDocID but accepts multiple DocIDs as a slice. 
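
A usage sketch for the renamed delete API, assuming `col` is a `client.Collection`; the ID string is one of the fixtures from the backup tests above:

```go
docID, err := client.NewDocIDFromString("bae-8096f2c1-ea4c-5226-8ba5-17fc4b68ac1f")
if err != nil {
	return err
}
res, err := col.DeleteWithDocID(ctx, docID)
if err != nil {
	return err
}
// On success, res.Count is 1 and res.DocIDs holds the deleted document's ID.
fmt.Println(res.Count, res.DocIDs)
```
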
+func (c *collection) DeleteWithDocIDs( ctx context.Context, - keys []client.DocKey, + docIDs []client.DocID, ) (*client.DeleteResult, error) { txn, err := c.getTxn(ctx, false) if err != nil { @@ -78,7 +78,7 @@ func (c *collection) DeleteWithKeys( defer c.discardImplicitTxn(ctx, txn) - res, err := c.deleteWithKeys(ctx, txn, keys, client.Deleted) + res, err := c.deleteWithIDs(ctx, txn, docIDs, client.Deleted) if err != nil { return nil, err } @@ -112,7 +112,7 @@ func (c *collection) deleteWithKey( key core.PrimaryDataStoreKey, status client.DocumentStatus, ) (*client.DeleteResult, error) { - // Check the docKey we have been given to delete with actually has a corresponding + // Check the key we have been given to delete with actually has a corresponding // document (i.e. document actually exists in the collection). err := c.applyDelete(ctx, txn, key) if err != nil { @@ -121,38 +121,38 @@ func (c *collection) deleteWithKey( // Upon successfull deletion, record a summary. results := &client.DeleteResult{ - Count: 1, - DocKeys: []string{key.DocKey}, + Count: 1, + DocIDs: []string{key.DocID}, } return results, nil } -func (c *collection) deleteWithKeys( +func (c *collection) deleteWithIDs( ctx context.Context, txn datastore.Txn, - keys []client.DocKey, + docIDs []client.DocID, status client.DocumentStatus, ) (*client.DeleteResult, error) { results := &client.DeleteResult{ - DocKeys: make([]string, 0), + DocIDs: make([]string, 0), } - for _, key := range keys { - dsKey := c.getPrimaryKeyFromDocKey(key) + for _, docID := range docIDs { + primaryKey := c.getPrimaryKeyFromDocID(docID) // Apply the function that will perform the full deletion of this document. - err := c.applyDelete(ctx, txn, dsKey) + err := c.applyDelete(ctx, txn, primaryKey) if err != nil { return nil, err } - // Add this deleted key to our list. - results.DocKeys = append(results.DocKeys, key.String()) + // Add this deleted docID to our list. + results.DocIDs = append(results.DocIDs, docID.String()) } // Upon successfull deletion, record a summary of how many we deleted. - results.Count = int64(len(results.DocKeys)) + results.Count = int64(len(results.DocIDs)) return results, nil } @@ -186,7 +186,7 @@ func (c *collection) deleteWithFilter( }() results := &client.DeleteResult{ - DocKeys: make([]string, 0), + DocIDs: make([]string, 0), } // Keep looping until results from the selection plan have been iterated through. @@ -202,26 +202,26 @@ func (c *collection) deleteWithFilter( } doc := selectionPlan.Value() - // Extract the dockey in the string format from the document value. - docKey := doc.GetKey() - // Convert from string to client.DocKey. - key := core.PrimaryDataStoreKey{ + // Extract the docID in the string format from the document value. + docID := doc.GetID() + + primaryKey := core.PrimaryDataStoreKey{ CollectionId: fmt.Sprint(c.ID()), - DocKey: docKey, + DocID: docID, } - // Delete the document that is associated with this key we got from the filter. - err = c.applyDelete(ctx, txn, key) + // Delete the document that is associated with this DS key we got from the filter. + err = c.applyDelete(ctx, txn, primaryKey) if err != nil { return nil, err } - // Add key of successfully deleted document to our list. - results.DocKeys = append(results.DocKeys, docKey) + // Add docID of successfully deleted document to our list. 
+ results.DocIDs = append(results.DocIDs, docID) } - results.Count = int64(len(results.DocKeys)) + results.Count = int64(len(results.DocIDs)) return results, nil } @@ -229,9 +229,9 @@ func (c *collection) deleteWithFilter( func (c *collection) applyDelete( ctx context.Context, txn datastore.Txn, - key core.PrimaryDataStoreKey, + primaryKey core.PrimaryDataStoreKey, ) error { - found, isDeleted, err := c.exists(ctx, txn, key) + found, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return err } @@ -239,10 +239,10 @@ func (c *collection) applyDelete( return client.ErrDocumentNotFound } if isDeleted { - return NewErrDocumentDeleted(key.DocKey) + return NewErrDocumentDeleted(primaryKey.DocID) } - dsKey := key.ToDataStoreKey() + dsKey := primaryKey.ToDataStoreKey() headset := clock.NewHeadSet( txn.Headstore(), @@ -278,7 +278,7 @@ func (c *collection) applyDelete( func() { c.db.events.Updates.Value().Publish( events.Update{ - DocKey: key.DocKey, + DocID: primaryKey.DocID, Cid: headNode.Cid(), SchemaRoot: c.Schema().Root, Block: headNode, diff --git a/db/collection_get.go b/db/collection_get.go index d210072793..9ab14d4424 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -20,16 +20,16 @@ import ( "github.com/sourcenetwork/defradb/db/fetcher" ) -func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { // create txn txn, err := c.getTxn(ctx, true) if err != nil { return nil, err } defer c.discardImplicitTxn(ctx, txn) - dsKey := c.getPrimaryKeyFromDocKey(key) + primaryKey := c.getPrimaryKeyFromDocID(docID) - found, isDeleted, err := c.exists(ctx, txn, dsKey) + found, isDeleted, err := c.exists(ctx, txn, primaryKey) if err != nil { return nil, err } @@ -37,7 +37,7 @@ func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted boo return nil, client.ErrDocumentNotFound } - doc, err := c.get(ctx, txn, dsKey, nil, showDeleted) + doc, err := c.get(ctx, txn, primaryKey, nil, showDeleted) if err != nil { return nil, err } @@ -47,7 +47,7 @@ func (c *collection) Get(ctx context.Context, key client.DocKey, showDeleted boo func (c *collection) get( ctx context.Context, txn datastore.Txn, - key core.PrimaryDataStoreKey, + primaryKey core.PrimaryDataStoreKey, fields []client.FieldDescription, showDeleted bool, ) (*client.Document, error) { @@ -60,8 +60,8 @@ func (c *collection) get( return nil, err } - // construct target key for DocKey - targetKey := base.MakeDocKey(c.Description(), key.DocKey) + // construct target DS key from DocID. 
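
At the public API level this lookup surfaces as `Collection.Get`, now keyed by `client.DocID`. A short usage sketch, assuming `col` and `docID` are in scope:

```go
// Pass showDeleted=true to also fetch documents that have been marked deleted.
doc, err := col.Get(ctx, docID, false)
if errors.Is(err, client.ErrDocumentNotFound) {
	// no active document with this ID
	return nil
} else if err != nil {
	return err
}
fmt.Println(doc.ID().String())
```
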
+ targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(c.Description(), primaryKey.DocID) // run the doc fetcher err = df.Start(ctx, core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd()))) if err != nil { diff --git a/db/collection_index.go b/db/collection_index.go index 278586902b..c724205805 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -125,7 +125,7 @@ func (c *collection) updateIndexedDoc( oldDoc, err := c.get( ctx, txn, - c.getPrimaryKeyFromDocKey(doc.Key()), desc.CollectIndexedFields(&schema), + c.getPrimaryKeyFromDocID(doc.ID()), desc.CollectIndexedFields(&schema), false, ) if err != nil { @@ -239,7 +239,7 @@ func (c *collection) iterateAllDocs( _ = df.Close() return err } - start := base.MakeCollectionKey(c.Description()) + start := base.MakeDataStoreKeyWithCollectionDescription(c.Description()) spans := core.NewSpans(core.NewSpan(start, start.PrefixEnd())) err = df.Start(ctx, spans) diff --git a/db/collection_update.go b/db/collection_update.go index e6dbc4617a..bdfbc0ddd5 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -26,10 +26,10 @@ import ( ) // UpdateWith updates a target document using the given updater type. Target -// can be a Filter statement, a single docKey, a single document, -// an array of docKeys, or an array of documents. +// can be a Filter statement, a single DocID, a single document, +// an array of DocIDs, or an array of documents. // If you want more type safety, use the respective typed versions of Update. -// Eg: UpdateWithFilter or UpdateWithKey +// Eg: UpdateWithFilter or UpdateWithDocID func (c *collection) UpdateWith( ctx context.Context, target any, @@ -38,10 +38,10 @@ func (c *collection) UpdateWith( switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) - case client.DocKey: - return c.UpdateWithKey(ctx, t, updater) - case []client.DocKey: - return c.UpdateWithKeys(ctx, t, updater) + case client.DocID: + return c.UpdateWithDocID(ctx, t, updater) + case []client.DocID: + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -67,12 +67,12 @@ func (c *collection) UpdateWithFilter( return res, c.commitImplicitTxn(ctx, txn) } -// UpdateWithKey updates using a DocKey to target a single document for update. +// UpdateWithDocID updates using a DocID to target a single document for update. // An updater value is provided, which could be a string Patch, string Merge Patch // or a parsed Patch, or parsed Merge Patch. -func (c *collection) UpdateWithKey( +func (c *collection) UpdateWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { txn, err := c.getTxn(ctx, false) @@ -80,7 +80,7 @@ func (c *collection) UpdateWithKey( return nil, err } defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithKey(ctx, txn, key, updater) + res, err := c.updateWithDocID(ctx, txn, docID, updater) if err != nil { return nil, err } @@ -88,12 +88,12 @@ func (c *collection) UpdateWithKey( return res, c.commitImplicitTxn(ctx, txn) } -// UpdateWithKeys is the same as UpdateWithKey but accepts multiple keys as a slice. +// UpdateWithDocIDs is the same as UpdateWithDocID but accepts multiple DocIDs as a slice. // An updater value is provided, which could be a string Patch, string Merge Patch // or a parsed Patch, or parsed Merge Patch. 
-func (c *collection) UpdateWithKeys( +func (c *collection) UpdateWithDocIDs( ctx context.Context, - keys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { txn, err := c.getTxn(ctx, false) @@ -101,7 +101,7 @@ func (c *collection) UpdateWithKeys( return nil, err } defer c.discardImplicitTxn(ctx, txn) - res, err := c.updateWithKeys(ctx, txn, keys, updater) + res, err := c.updateWithIDs(ctx, txn, docIDs, updater) if err != nil { return nil, err } @@ -109,10 +109,10 @@ func (c *collection) UpdateWithKeys( return res, c.commitImplicitTxn(ctx, txn) } -func (c *collection) updateWithKey( +func (c *collection) updateWithDocID( ctx context.Context, txn datastore.Txn, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { parsedUpdater, err := fastjson.Parse(updater) @@ -127,7 +127,7 @@ func (c *collection) updateWithKey( return nil, client.ErrInvalidUpdater } - doc, err := c.Get(ctx, key, false) + doc, err := c.Get(ctx, docID, false) if err != nil { return nil, err } @@ -147,16 +147,16 @@ func (c *collection) updateWithKey( } results := &client.UpdateResult{ - Count: 1, - DocKeys: []string{key.String()}, + Count: 1, + DocIDs: []string{docID.String()}, } return results, nil } -func (c *collection) updateWithKeys( +func (c *collection) updateWithIDs( ctx context.Context, txn datastore.Txn, - keys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { parsedUpdater, err := fastjson.Parse(updater) @@ -172,10 +172,10 @@ func (c *collection) updateWithKeys( } results := &client.UpdateResult{ - DocKeys: make([]string, len(keys)), + DocIDs: make([]string, len(docIDs)), } - for i, key := range keys { - doc, err := c.Get(ctx, key, false) + for i, docID := range docIDs { + doc, err := c.Get(ctx, docID, false) if err != nil { return nil, err } @@ -194,7 +194,7 @@ func (c *collection) updateWithKeys( return nil, err } - results.DocKeys[i] = key.String() + results.DocIDs[i] = docID.String() results.Count++ } return results, nil @@ -245,7 +245,7 @@ func (c *collection) updateWithFilter( }() results := &client.UpdateResult{ - DocKeys: make([]string, 0), + DocIDs: make([]string, 0), } docMap := selectionPlan.DocumentMap() @@ -283,7 +283,7 @@ func (c *collection) updateWithFilter( } // add successful updated doc to results - results.DocKeys = append(results.DocKeys, doc.Key().String()) + results.DocIDs = append(results.DocIDs, doc.ID().String()) results.Count++ } @@ -341,21 +341,21 @@ func (c *collection) isSecondaryIDField(fieldDesc client.FieldDescription) (clie return relationFieldDescription, valid && !relationFieldDescription.IsPrimaryRelation() } -// patchPrimaryDoc patches the (primary) document linked to from the document of the given dockey via the +// patchPrimaryDoc patches the (primary) document linked to from the document of the given DocID via the // given (secondary) relationship field description (hosted on the collection of the document matching the -// given dockey). +// given DocID). // -// The given field value should be the string representation of the dockey of the primary document to be +// The given field value should be the string representation of the DocID of the primary document to be // patched. 
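
A matching sketch for the update side, assuming `col` is a `client.Collection` and `docID1`, `docID2` are `client.DocID` values; the updater string here is a merge-style patch, one of the accepted forms listed above:

```go
res, err := col.UpdateWithDocIDs(ctx, []client.DocID{docID1, docID2}, `{"age": 31}`)
if err != nil {
	return err
}
// res.Count reports how many documents were updated; res.DocIDs lists their IDs.
fmt.Println(res.Count, res.DocIDs)
```
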
func (c *collection) patchPrimaryDoc( ctx context.Context, txn datastore.Txn, secondaryCollectionName string, relationFieldDescription client.FieldDescription, - docKey string, + docID string, fieldValue string, ) error { - primaryDockey, err := client.NewDocKeyFromString(fieldValue) + primaryDocID, err := client.NewDocIDFromString(fieldValue) if err != nil { return err } @@ -384,7 +384,7 @@ func (c *collection) patchPrimaryDoc( doc, err := primaryCol.Get( ctx, - primaryDockey, + primaryDocID, false, ) if err != nil && !errors.Is(err, ds.ErrNotFound) { @@ -401,11 +401,11 @@ func (c *collection) patchPrimaryDoc( return err } - if existingVal != nil && existingVal.Value() != "" && existingVal.Value() != docKey { - return NewErrOneOneAlreadyLinked(docKey, fieldValue, relationFieldDescription.RelationName) + if existingVal != nil && existingVal.Value() != "" && existingVal.Value() != docID { + return NewErrOneOneAlreadyLinked(docID, fieldValue, relationFieldDescription.RelationName) } - err = doc.Set(primaryIDField.Name, docKey) + err = doc.Set(primaryIDField.Name, docID) if err != nil { return err } @@ -424,7 +424,7 @@ func (c *collection) patchPrimaryDoc( // the typed value again as an interface. func validateFieldSchema(val *fastjson.Value, field client.FieldDescription) (any, error) { switch field.Kind { - case client.FieldKind_DocKey, client.FieldKind_STRING: + case client.FieldKind_DocID, client.FieldKind_STRING: return getString(val) case client.FieldKind_STRING_ARRAY: diff --git a/db/errors.go b/db/errors.go index 67f74db296..ca5e09e107 100644 --- a/db/errors.go +++ b/db/errors.go @@ -47,8 +47,8 @@ const ( errFieldKindNotFound string = "no type found for given name" errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" errSchemaNotFound string = "no schema found for given name" - errDocumentAlreadyExists string = "a document with the given dockey already exists" - errDocumentDeleted string = "a document with the given dockey has been deleted" + errDocumentAlreadyExists string = "a document with the given ID already exists" + errDocumentDeleted string = "a document with the given ID has been deleted" errIndexMissingFields string = "index missing fields" errNonZeroIndexIDProvided string = "non-zero index ID provided" errIndexFieldMissingName string = "index field missing name" @@ -73,9 +73,10 @@ const ( errIndexDescHasNonExistingField string = "index description has non existing field" errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" errCreateFile string = "failed to create file" + errRemoveFile string = "failed to remove file" errOpenFile string = "failed to open file" errCloseFile string = "failed to close file" - errRemoveFile string = "failed to remove file" + errFailedtoCloseQueryReqAllIDs string = "failed to close query requesting all docIDs" errFailedToReadByte string = "failed to read byte" errFailedToWriteString string = "failed to write string" errJSONDecode string = "failed to decode JSON" @@ -91,82 +92,23 @@ const ( ) var ( - ErrFailedToGetHeads = errors.New(errFailedToGetHeads) - ErrFailedToCreateCollectionQuery = errors.New(errFailedToCreateCollectionQuery) - ErrFailedToGetCollection = errors.New(errFailedToGetCollection) - ErrFailedToGetAllCollections = errors.New(errFailedToGetAllCollections) - // ErrDocVerification occurs when a documents contents fail the verification during a Create() - // call against the supplied Document Key. 
- ErrDocVerification = errors.New(errDocVerification) - ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") - ErrDeleteTargetEmpty = errors.New("the doc delete targeter cannot be empty") - ErrDeleteEmpty = errors.New("the doc delete cannot be empty") - ErrUpdateTargetEmpty = errors.New("the doc update targeter cannot be empty") - ErrUpdateEmpty = errors.New("the doc update cannot be empty") - ErrInvalidMergeValueType = errors.New( - "the type of value in the merge patch doesn't match the schema", - ) - ErrMissingDocFieldToUpdate = errors.New("missing document field to update") - ErrDocMissingKey = errors.New("document is missing key") - ErrInvalidFilter = errors.New("invalid filter") - ErrInvalidOpPath = errors.New("invalid patch op path") - ErrDocumentAlreadyExists = errors.New(errDocumentAlreadyExists) - ErrDocumentDeleted = errors.New(errDocumentDeleted) - ErrUnknownCRDTArgument = errors.New("invalid CRDT arguments") - ErrCollectionAlreadyExists = errors.New("collection already exists") - ErrCollectionNameEmpty = errors.New("collection name can't be empty") - ErrSchemaNameEmpty = errors.New("schema name can't be empty") - ErrSchemaRootEmpty = errors.New("schema root can't be empty") - ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") - ErrKeyEmpty = errors.New("key cannot be empty") - ErrAddingP2PCollection = errors.New(errAddingP2PCollection) - ErrRemovingP2PCollection = errors.New(errRemovingP2PCollection) - ErrAddCollectionWithPatch = errors.New(errAddCollectionWithPatch) - ErrCollectionIDDoesntMatch = errors.New(errCollectionIDDoesntMatch) - ErrSchemaRootDoesntMatch = errors.New(errSchemaRootDoesntMatch) - ErrCannotModifySchemaName = errors.New(errCannotModifySchemaName) - ErrCannotSetVersionID = errors.New(errCannotSetVersionID) - ErrCannotSetFieldID = errors.New(errCannotSetFieldID) - ErrRelationalFieldMissingSchema = errors.New(errRelationalFieldMissingSchema) - ErrRelationalFieldInvalidRelationType = errors.New(errRelationalFieldInvalidRelationType) - ErrRelationalFieldMissingIDField = errors.New(errRelationalFieldMissingIDField) - ErrRelationalFieldMissingRelationName = errors.New(errRelationalFieldMissingRelationName) - ErrPrimarySideNotDefined = errors.New(errPrimarySideNotDefined) - ErrPrimarySideOnMany = errors.New(errPrimarySideOnMany) - ErrBothSidesPrimary = errors.New(errBothSidesPrimary) - ErrRelatedFieldKindMismatch = errors.New(errRelatedFieldKindMismatch) - ErrRelatedFieldRelationTypeMismatch = errors.New(errRelatedFieldRelationTypeMismatch) - ErrRelationalFieldIDInvalidType = errors.New(errRelationalFieldIDInvalidType) - ErrDuplicateField = errors.New(errDuplicateField) - ErrCannotMutateField = errors.New(errCannotMutateField) - ErrCannotMoveField = errors.New(errCannotMoveField) - ErrInvalidCRDTType = errors.New(errInvalidCRDTType) - ErrCannotDeleteField = errors.New(errCannotDeleteField) - ErrFieldKindNotFound = errors.New(errFieldKindNotFound) - ErrFieldKindDoesNotMatchFieldSchema = errors.New(errFieldKindDoesNotMatchFieldSchema) - ErrSchemaNotFound = errors.New(errSchemaNotFound) - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) - ErrIndexFieldMissingDirection = errors.New(errIndexFieldMissingDirection) - ErrIndexSingleFieldWrongDirection = errors.New(errIndexSingleFieldWrongDirection) - ErrCorruptedIndex = errors.New(errCorruptedIndex) - ErrCanNotChangeIndexWithPatch = errors.New(errCanNotChangeIndexWithPatch) - 
ErrFieldOrAliasToFieldNotExist = errors.New(errFieldOrAliasToFieldNotExist) - ErrCreateFile = errors.New(errCreateFile) - ErrOpenFile = errors.New(errOpenFile) - ErrCloseFile = errors.New(errCloseFile) - ErrRemoveFile = errors.New(errRemoveFile) - ErrFailedToReadByte = errors.New(errFailedToReadByte) - ErrFailedToWriteString = errors.New(errFailedToWriteString) - ErrJSONDecode = errors.New(errJSONDecode) - ErrDocFromMap = errors.New(errDocFromMap) - ErrDocCreate = errors.New(errDocCreate) - ErrDocUpdate = errors.New(errDocUpdate) - ErrExpectedJSONObject = errors.New(errExpectedJSONObject) - ErrExpectedJSONArray = errors.New(errExpectedJSONArray) - ErrOneOneAlreadyLinked = errors.New(errOneOneAlreadyLinked) - ErrIndexDoesNotMatchName = errors.New(errIndexDoesNotMatchName) - ErrInvalidViewQuery = errors.New(errInvalidViewQuery) + ErrFailedToGetCollection = errors.New(errFailedToGetCollection) + ErrSubscriptionsNotAllowed = errors.New("server does not accept subscriptions") + ErrInvalidFilter = errors.New("invalid filter") + ErrCollectionAlreadyExists = errors.New("collection already exists") + ErrCollectionNameEmpty = errors.New("collection name can't be empty") + ErrSchemaNameEmpty = errors.New("schema name can't be empty") + ErrSchemaRootEmpty = errors.New("schema root can't be empty") + ErrSchemaVersionIDEmpty = errors.New("schema version ID can't be empty") + ErrKeyEmpty = errors.New("key cannot be empty") + ErrCannotSetVersionID = errors.New(errCannotSetVersionID) + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexFieldMissingName = errors.New(errIndexFieldMissingName) + ErrIndexSingleFieldWrongDirection = errors.New(errIndexSingleFieldWrongDirection) + ErrCorruptedIndex = errors.New(errCorruptedIndex) + ErrExpectedJSONObject = errors.New(errExpectedJSONObject) + ErrExpectedJSONArray = errors.New(errExpectedJSONArray) + ErrInvalidViewQuery = errors.New(errInvalidViewQuery) ) // NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. @@ -245,6 +187,9 @@ func NewErrFailedToGetAllCollections(inner error) error { } // NewErrDocVerification returns a new error indicating that the document verification failed. +// +// This occurs when a documents contents fail the verification during a Create() +// call against the supplied Document ID (docID). 
func NewErrDocVerification(expected string, actual string) error { return errors.New( errDocVerification, @@ -450,17 +395,17 @@ func NewErrCannotDeleteField(name string, id client.FieldID) error { ) } -func NewErrDocumentAlreadyExists(dockey string) error { +func NewErrDocumentAlreadyExists(docID string) error { return errors.New( errDocumentAlreadyExists, - errors.NewKV("DocKey", dockey), + errors.NewKV("DocID", docID), ) } -func NewErrDocumentDeleted(dockey string) error { +func NewErrDocumentDeleted(docID string) error { return errors.New( errDocumentDeleted, - errors.NewKV("DocKey", dockey), + errors.NewKV("DocID", docID), ) } @@ -635,10 +580,10 @@ func NewErrIndexDoesNotMatchName(index, name string) error { ) } -func NewErrCanNotIndexNonUniqueField(dockey, fieldName string, value any) error { +func NewErrCanNotIndexNonUniqueField(docID, fieldName string, value any) error { return errors.New( errCanNotIndexNonUniqueField, - errors.NewKV("Dockey", dockey), + errors.NewKV("DocID", docID), errors.NewKV("Field name", fieldName), errors.NewKV("Field value", value), ) diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go index bc22471465..dc9291fb0d 100644 --- a/db/fetcher/encoded_doc.go +++ b/db/fetcher/encoded_doc.go @@ -19,16 +19,20 @@ import ( ) type EncodedDocument interface { - // Key returns the key of the document - Key() []byte + // ID returns the ID of the document + ID() []byte + SchemaVersionID() string + // Status returns the document status. // // For example, whether it is deleted or active. Status() client.DocumentStatus + // Properties returns a copy of the decoded property values mapped by their field // description. Properties(onlyFilterProps bool) (map[client.FieldDescription]any, error) + // Reset re-initializes the EncodedDocument object. Reset() } @@ -61,7 +65,7 @@ func (e encProperty) Decode() (any, error) { // @todo: Implement Encoded Document type type encodedDocument struct { - key []byte + id []byte schemaVersionID string status client.DocumentStatus properties map[client.FieldDescription]*encProperty @@ -78,8 +82,8 @@ type encodedDocument struct { var _ EncodedDocument = (*encodedDocument)(nil) -func (encdoc *encodedDocument) Key() []byte { - return encdoc.key +func (encdoc *encodedDocument) ID() []byte { + return encdoc.id } func (encdoc *encodedDocument) SchemaVersionID() string { @@ -93,7 +97,7 @@ func (encdoc *encodedDocument) Status() client.DocumentStatus { // Reset re-initializes the EncodedDocument object. 
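
The renamed accessor feeds the two decode paths defined just below: `Decode` re-parses the raw ID bytes into a `client.DocID` and builds a full `client.Document`, while `DecodeToDoc` only stamps the ID string onto a mapped `core.Doc`. A minimal sketch of the first path, assuming `encdoc` is a populated `fetcher.EncodedDocument`:

```go
doc, err := fetcher.Decode(encdoc)
if err != nil {
	// e.g. the stored ID bytes failed client.NewDocIDFromString
	return err
}
fmt.Println(doc.ID().String()) // the decoded document exposes the parsed docID
```
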
func (encdoc *encodedDocument) Reset() { encdoc.properties = make(map[client.FieldDescription]*encProperty, 0) - encdoc.key = nil + encdoc.id = nil encdoc.filterSet = nil encdoc.selectSet = nil encdoc.schemaVersionID = "" @@ -103,12 +107,12 @@ func (encdoc *encodedDocument) Reset() { // Decode returns a properly decoded document object func Decode(encdoc EncodedDocument) (*client.Document, error) { - key, err := client.NewDocKeyFromString(string(encdoc.Key())) + docID, err := client.NewDocIDFromString(string(encdoc.ID())) if err != nil { return nil, err } - doc := client.NewDocWithKey(key) + doc := client.NewDocWithID(docID) properties, err := encdoc.Properties(false) if err != nil { return nil, err @@ -141,8 +145,8 @@ func (encdoc *encodedDocument) MergeProperties(other EncodedDocument) { for field, prop := range otherEncDoc.properties { encdoc.properties[field] = prop } - if other.Key() != nil { - encdoc.key = other.Key() + if other.ID() != nil { + encdoc.id = other.ID() } if other.SchemaVersionID() != "" { encdoc.schemaVersionID = other.SchemaVersionID() @@ -153,7 +157,7 @@ func (encdoc *encodedDocument) MergeProperties(other EncodedDocument) { // map of field/value pairs func DecodeToDoc(encdoc EncodedDocument, mapping *core.DocumentMapping, filter bool) (core.Doc, error) { doc := mapping.NewDoc() - doc.SetKey(string(encdoc.Key())) + doc.SetID(string(encdoc.ID())) properties, err := encdoc.Properties(filter) if err != nil { diff --git a/db/fetcher/fetcher.go b/db/fetcher/fetcher.go index da7a0df1e1..a9cb39d9d5 100644 --- a/db/fetcher/fetcher.go +++ b/db/fetcher/fetcher.go @@ -127,7 +127,7 @@ type DocumentFetcher struct { // Since deleted documents are stored under a different instance type than active documents, // we use a parallel fetcher to be able to return the documents in the expected order. - // That being lexicographically ordered dockeys. + // That being lexicographically ordered docIDs. 
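// A nil deletedDocFetcher means deleted documents were not requested; when it is set, FetchNext merges its results with the active-document stream by docID.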
deletedDocFetcher *DocumentFetcher execInfo ExecInfo @@ -250,7 +250,7 @@ func (df *DocumentFetcher) start(ctx context.Context, spans core.Spans, withDele df.deletedDocs = withDeleted if !spans.HasValue { // no specified spans so create a prefix scan key for the entire collection - start := base.MakeCollectionKey(df.col.Description()) + start := base.MakeDataStoreKeyWithCollectionDescription(df.col.Description()) if withDeleted { start = start.WithDeletedFlag() } else { @@ -334,7 +334,7 @@ func (df *DocumentFetcher) nextKey(ctx context.Context, seekNext bool) (spanDone if seekNext { curKey := df.kv.Key - curKey.FieldId = "" // clear field so prefixEnd applies to dockey + curKey.FieldId = "" // clear field so prefixEnd applies to docID seekKey := curKey.PrefixEnd().ToString() spanDone, df.kv, err = df.seekKV(seekKey) // handle any internal errors @@ -370,7 +370,7 @@ } // check if we've crossed document boundaries - if (df.doc.key != nil && df.kv.Key.DocKey != string(df.doc.key)) || seekNext { + if (df.doc.id != nil && df.kv.Key.DocID != string(df.doc.id)) || seekNext { df.isReadingDocument = false return false, true, nil } @@ -472,10 +472,10 @@ func (df *DocumentFetcher) processKV(kv *keyValue) error { if df.filterSet != nil { df.doc.filterSet = bitset.New(df.filterSet.Len()) if df.filterSet.Test(0) { - df.doc.filterSet.Set(0) // mark dockey as set + df.doc.filterSet.Set(0) // mark docID as set } } - df.doc.key = []byte(kv.Key.DocKey) + df.doc.id = []byte(kv.Key.DocID) df.passedFilter = false df.ranFilter = false @@ -534,15 +534,15 @@ func (df *DocumentFetcher) FetchNext(ctx context.Context) (EncodedDocument, Exec var resultExecInfo ExecInfo // If the deletedDocFetcher isn't nil, this means that the user requested to include the deleted documents - // in the query. To keep the active and deleted docs in lexicographic order of dockeys, we use the two distinct - // fetchers and fetch the one that has the next lowest (or highest if requested in reverse order) dockey value. + // in the query. To keep the active and deleted docs in lexicographic order of docIDs, we use the two distinct + // fetchers and fetch the one that has the next lowest (or highest if requested in reverse order) docID value. ddf := df.deletedDocFetcher if ddf != nil { // If we've reached the end of the deleted docs, we can skip to getting the next active docs.
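// The comparison below performs the merge: in forward order the fetcher holding the smaller docID is consumed first, in reverse order the larger one, keeping the combined stream in lexicographic docID order.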
if !ddf.kvEnd { if df.kvEnd || - (df.reverse && ddf.kv.Key.DocKey > df.kv.Key.DocKey) || - (!df.reverse && ddf.kv.Key.DocKey < df.kv.Key.DocKey) { + (df.reverse && ddf.kv.Key.DocID > df.kv.Key.DocID) || + (!df.reverse && ddf.kv.Key.DocID < df.kv.Key.DocID) { encdoc, execInfo, err := ddf.FetchNext(ctx) if err != nil { return nil, ExecInfo{}, err } @@ -573,7 +573,7 @@ func (df *DocumentFetcher) fetchNext(ctx context.Context) (EncodedDocument, Exec if df.kv == nil { return nil, ExecInfo{}, client.NewErrUninitializeProperty("DocumentFetcher", "kv") } - // save the DocKey of the current kv pair so we can track when we cross the doc pair boundries + // save the DocID of the current kv pair so we can track when we cross the doc pair boundaries // keyparts := df.kv.Key.List() // key := keyparts[len(keyparts)-2] diff --git a/db/fetcher/indexer.go index 6b4833d00f..b8608e2b7d 100644 --- a/db/fetcher/indexer.go +++ b/db/fetcher/indexer.go @@ -129,15 +129,15 @@ func (f *IndexFetcher) FetchNext(ctx context.Context) (EncodedDocument, ExecInfo } if f.indexDesc.Unique { - f.doc.key = res.value + f.doc.id = res.value } else { - f.doc.key = res.key.FieldValues[1] + f.doc.id = res.key.FieldValues[1] } f.doc.properties[f.indexedField] = property f.execInfo.FieldsFetched++ if f.docFetcher != nil && len(f.docFields) > 0 { - targetKey := base.MakeDocKey(f.col.Description(), string(f.doc.key)) + targetKey := base.MakeDataStoreKeyWithCollectionAndDocID(f.col.Description(), string(f.doc.id)) spans := core.NewSpans(core.NewSpan(targetKey, targetKey.PrefixEnd())) err := f.docFetcher.Start(ctx, spans) if err != nil { diff --git a/db/fetcher/mocks/encoded_document.go index 538d32ff4d..5d9382a14d 100644 --- a/db/fetcher/mocks/encoded_document.go +++ b/db/fetcher/mocks/encoded_document.go @@ -21,8 +21,8 @@ func (_m *EncodedDocument) EXPECT() *EncodedDocument_Expecter { return &EncodedDocument_Expecter{mock: &_m.Mock} } -// Key provides a mock function with given fields: -func (_m *EncodedDocument) Key() []byte { +// ID provides a mock function with given fields: +func (_m *EncodedDocument) ID() []byte { ret := _m.Called() var r0 []byte @@ -37,29 +37,29 @@ func (_m *EncodedDocument) Key() []byte { return r0 } -// EncodedDocument_Key_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Key' -type EncodedDocument_Key_Call struct { +// EncodedDocument_ID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ID' +type EncodedDocument_ID_Call struct { *mock.Call } -// Key is a helper method to define mock.On call -func (_e *EncodedDocument_Expecter) Key() *EncodedDocument_Key_Call { - return &EncodedDocument_Key_Call{Call: _e.mock.On("Key")} +// ID is a helper method to define mock.On call +func (_e *EncodedDocument_Expecter) ID() *EncodedDocument_ID_Call { + return &EncodedDocument_ID_Call{Call: _e.mock.On("ID")} } -func (_c *EncodedDocument_Key_Call) Run(run func()) *EncodedDocument_Key_Call { +func (_c *EncodedDocument_ID_Call) Run(run func()) *EncodedDocument_ID_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } -func (_c *EncodedDocument_Key_Call) Return(_a0 []byte) *EncodedDocument_Key_Call { +func (_c *EncodedDocument_ID_Call) Return(_a0 []byte) *EncodedDocument_ID_Call { _c.Call.Return(_a0) return _c } -func (_c *EncodedDocument_Key_Call) RunAndReturn(run func() []byte) *EncodedDocument_Key_Call { +func (_c *EncodedDocument_ID_Call) RunAndReturn(run func() []byte)
*EncodedDocument_ID_Call { _c.Call.Return(run) return _c } diff --git a/db/fetcher/versioned.go index 454bcf17c6..fc232bb9c7 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -86,7 +86,7 @@ type VersionedFetcher struct { root datastore.RootStore store datastore.Txn - key core.DataStoreKey + dsKey core.DataStoreKey version cid.Cid queuedCids *list.List @@ -144,25 +144,25 @@ func (vf *VersionedFetcher) Start(ctx context.Context, spans core.Spans) error { } // For the VersionedFetcher, the spans need to be in the format - // Span{Start: DocKey, End: CID} + // Span{Start: DocID, End: CID} dk := spans.Value[0].Start() cidRaw := spans.Value[0].End() - if dk.DocKey == "" { - return client.NewErrUninitializeProperty("Spans", "DocKey") - } else if cidRaw.DocKey == "" { // todo: dont abuse DataStoreKey/Span like this! + if dk.DocID == "" { + return client.NewErrUninitializeProperty("Spans", "DocID") + } else if cidRaw.DocID == "" { // todo: don't abuse DataStoreKey/Span like this! return client.NewErrUninitializeProperty("Spans", "CID") } // decode cidRaw from core.Key to cid.Cid // need to remove '/' prefix from the core.Key - c, err := cid.Decode(cidRaw.DocKey) + c, err := cid.Decode(cidRaw.DocID) if err != nil { return NewErrFailedToDecodeCIDForVFetcher(err) } vf.ctx = ctx - vf.key = dk + vf.dsKey = dk vf.version = c if err := vf.seekTo(vf.version); err != nil { @@ -180,7 +180,7 @@ func (vf *VersionedFetcher) Rootstore() ds.Datastore { // Start a fetcher with the needed info (cid embedded in a span) /* -1. Init with DocKey (VersionedFetched is scoped to a single doc) +1. Init with DocID (VersionedFetcher is scoped to a single doc) 2. Create transient stores (head, data, block) 3. Start with a given Txn and CID span set (length 1 for now) 4. call traverse with the target cid @@ -258,8 +258,8 @@ func (vf *VersionedFetcher) seekNext(c cid.Cid, topParent bool) error { // check if cid block exists in the global store, handle err // @todo: Find an efficient way to determine if a CID is a member of a - // DocKey State graph - // @body: We could possibly append the DocKey to the CID either as a + // DocID State graph + // @body: We could possibly append the DocID to the CID either as a // child key, or an instance on the CID key. hasLocalBlock, err := vf.store.DAGstore().Has(vf.ctx, c) @@ -380,7 +380,7 @@ func (vf *VersionedFetcher) processNode( // handle CompositeDAG mcrdt, exists := vf.mCRDTs[crdtIndex] if !exists { - key, err := base.MakePrimaryIndexKeyForCRDT(vf.col.Description(), vf.col.Schema(), ctype, vf.key, fieldName) + dsKey, err := base.MakePrimaryIndexKeyForCRDT(vf.col.Description(), vf.col.Schema(), ctype, vf.dsKey, fieldName) if err != nil { return err } @@ -388,7 +388,7 @@ func (vf *VersionedFetcher) processNode( vf.store, core.CollectionSchemaVersionKey{}, ctype, - key, + dsKey, fieldName, ) if err != nil { @@ -429,7 +429,7 @@ func (vf *VersionedFetcher) Close() error { } // NewVersionedSpan creates a new VersionedSpan from a DataStoreKey and a version CID. -func NewVersionedSpan(dockey core.DataStoreKey, version cid.Cid) core.Spans { // Todo: Don't abuse DataStoreKey for version cid!
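// The version CID has no dedicated slot, so it is stringified into the DocID field of the span's end key; VersionedFetcher.Start decodes it back with cid.Decode.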
- return core.NewSpans(core.NewSpan(dockey, core.DataStoreKey{DocKey: version.String()})) + return core.NewSpans(core.NewSpan(dsKey, core.DataStoreKey{DocID: version.String()})) } diff --git a/db/index.go b/db/index.go index 804eac492e..693df4a5f1 100644 --- a/db/index.go +++ b/db/index.go @@ -212,7 +212,7 @@ func (i *collectionSimpleIndex) getDocumentsIndexKey( return core.IndexDataStoreKey{}, err } - key.FieldValues = append(key.FieldValues, []byte(doc.Key().String())) + key.FieldValues = append(key.FieldValues, []byte(doc.ID().String())) return key, nil } @@ -280,7 +280,7 @@ func (i *collectionUniqueIndex) Save( if exists { return i.newUniqueIndexError(doc) } - err = txn.Datastore().Put(ctx, key.ToDS(), []byte(doc.Key().String())) + err = txn.Datastore().Put(ctx, key.ToDS(), []byte(doc.ID().String())) if err != nil { return NewErrFailedToStoreIndexedField(key.ToDS().String(), err) } @@ -294,7 +294,7 @@ func (i *collectionUniqueIndex) newUniqueIndexError( if err != nil { return err } - return NewErrCanNotIndexNonUniqueField(doc.Key().String(), i.fieldDesc.Name, fieldVal.Value()) + return NewErrCanNotIndexNonUniqueField(doc.ID().String(), i.fieldDesc.Name, fieldVal.Value()) } func (i *collectionUniqueIndex) Update( diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 4110463c09..38309bf745 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -176,7 +176,7 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey { key.FieldValues = [][]byte{fieldBytesVal} if !b.isUnique { - key.FieldValues = append(key.FieldValues, []byte(b.doc.Key().String())) + key.FieldValues = append(key.FieldValues, []byte(b.doc.ID().String())) } } else if len(b.values) > 0 { key.FieldValues = b.values @@ -376,13 +376,13 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t require.NoError(f.t, err) f.commitTxn() - userDocKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(userDoc).Build() - prodDocKey := newIndexKeyBuilder(f).Col(productsColName).Field(productsCategoryFieldName).Doc(prodDoc).Build() + userDocID := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(userDoc).Build() + prodDocID := newIndexKeyBuilder(f).Col(productsColName).Field(productsCategoryFieldName).Doc(prodDoc).Build() - data, err := f.txn.Datastore().Get(f.ctx, userDocKey.ToDS()) + data, err := f.txn.Datastore().Get(f.ctx, userDocID.ToDS()) require.NoError(t, err) assert.Len(t, data, 0) - data, err = f.txn.Datastore().Get(f.ctx, prodDocKey.ToDS()) + data, err = f.txn.Datastore().Get(f.ctx, prodDocID.ToDS()) require.NoError(t, err) assert.Len(t, data, 0) } @@ -619,7 +619,7 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) fieldKeyString := core.DataStoreKey{ CollectionID: f.users.Description().IDString(), - }.WithDocKey(doc.Key().String()). + }.WithDocID(doc.ID().String()). WithFieldId("1"). WithValueFlag(). 
ToString() @@ -923,7 +923,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { require.NoError(t, err) encodedDoc := shimEncodedDocument{ - key: []byte(doc.Key().String()), + key: []byte(doc.ID().String()), schemaVersionID: f.users.Schema().VersionID, } @@ -987,7 +987,7 @@ type shimEncodedDocument struct { var _ fetcher.EncodedDocument = (*shimEncodedDocument)(nil) -func (encdoc *shimEncodedDocument) Key() []byte { +func (encdoc *shimEncodedDocument) ID() []byte { return encdoc.key } @@ -1026,10 +1026,10 @@ func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { data, err := f.txn.Datastore().Get(f.ctx, key1.ToDS()) require.NoError(t, err, key1.ToString()) - assert.Equal(t, data, []byte(doc1.Key().String())) + assert.Equal(t, data, []byte(doc1.ID().String())) data, err = f.txn.Datastore().Get(f.ctx, key2.ToDS()) require.NoError(t, err) - assert.Equal(t, data, []byte(doc2.Key().String())) + assert.Equal(t, data, []byte(doc2.ID().String())) } func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { @@ -1052,7 +1052,7 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { data, err := f.txn.Datastore().Get(f.ctx, key.ToDS()) require.NoError(t, err) - assert.Equal(t, data, []byte(doc.Key().String())) + assert.Equal(t, data, []byte(doc.ID().String())) } func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { diff --git a/db/subscriptions.go b/db/subscriptions.go index af981ad95f..2e7d2d4123 100644 --- a/db/subscriptions.go +++ b/db/subscriptions.go @@ -74,7 +74,7 @@ func (db *db) handleEvent( ) { p := planner.New(ctx, db.WithTxn(txn), txn) - s := r.ToSelect(evt.DocKey, evt.Cid.String()) + s := r.ToSelect(evt.DocID, evt.Cid.String()) result, err := p.RunSubscriptionRequest(ctx, s) if err != nil { diff --git a/docs/cli/defradb_client.md b/docs/cli/defradb_client.md index a52fce09f3..c3bd354bc7 100644 --- a/docs/cli/defradb_client.md +++ b/docs/cli/defradb_client.md @@ -38,4 +38,5 @@ Execute queries, add schema types, obtain node info, etc. * [defradb client query](defradb_client_query.md) - Send a DefraDB GraphQL query request * [defradb client schema](defradb_client_schema.md) - Interact with the schema system of a DefraDB node * [defradb client tx](defradb_client_tx.md) - Create, commit, and discard DefraDB transactions +* [defradb client view](defradb_client_view.md) - Manage views within a running DefraDB instance diff --git a/docs/cli/defradb_client_collection.md b/docs/cli/defradb_client_collection.md index 7807f49503..d164902b97 100644 --- a/docs/cli/defradb_client_collection.md +++ b/docs/cli/defradb_client_collection.md @@ -33,9 +33,9 @@ Create, read, update, and delete documents within a collection. * [defradb client](defradb_client.md) - Interact with a DefraDB node * [defradb client collection create](defradb_client_collection_create.md) - Create a new document. -* [defradb client collection delete](defradb_client_collection_delete.md) - Delete documents by key or filter. +* [defradb client collection delete](defradb_client_collection_delete.md) - Delete documents by docID or filter. * [defradb client collection describe](defradb_client_collection_describe.md) - View collection description. +* [defradb client collection docIDs](defradb_client_collection_docIDs.md) - List all document IDs (docIDs). * [defradb client collection get](defradb_client_collection_get.md) - View document fields. -* [defradb client collection keys](defradb_client_collection_keys.md) - List all document keys. 
-* [defradb client collection update](defradb_client_collection_update.md) - Update documents by key or filter. +* [defradb client collection update](defradb_client_collection_update.md) - Update documents by docID or filter. diff --git a/docs/cli/defradb_client_collection_delete.md b/docs/cli/defradb_client_collection_delete.md index fea6c6ccc7..30676654d5 100644 --- a/docs/cli/defradb_client_collection_delete.md +++ b/docs/cli/defradb_client_collection_delete.md @@ -1,28 +1,28 @@ ## defradb client collection delete -Delete documents by key or filter. +Delete documents by docID or filter. ### Synopsis -Delete documents by key or filter and lists the number of documents deleted. +Delete documents by docID or filter and list the number of documents deleted. -Example: delete by key(s) - defradb client collection delete --name User --key bae-123,bae-456 +Example: delete by docID(s) + defradb client collection delete --name User --docID bae-123,bae-456 Example: delete by filter defradb client collection delete --name User --filter '{ "_gte": { "points": 100 } }' ``` -defradb client collection delete [--filter --key ] [flags] +defradb client collection delete [--filter <filter> --docID <docID>] [flags] ``` ### Options ``` + --docID strings Document ID --filter string Document filter -h, --help help for delete - --key strings Document key ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_collection_keys.md b/docs/cli/defradb_client_collection_docIDs.md similarity index 83% rename from docs/cli/defradb_client_collection_keys.md rename to docs/cli/defradb_client_collection_docIDs.md index 234d8e051f..5ca8444e2e 100644 --- a/docs/cli/defradb_client_collection_keys.md +++ b/docs/cli/defradb_client_collection_docIDs.md @@ -1,23 +1,23 @@ -## defradb client collection keys +## defradb client collection docIDs -List all document keys. +List all document IDs (docIDs). ### Synopsis -List all document keys. +List all document IDs (docIDs). Example: - defradb client collection keys --name User + defradb client collection docIDs --name User ``` -defradb client collection keys [flags] +defradb client collection docIDs [flags] ``` ### Options ``` - -h, --help help for keys + -h, --help help for docIDs ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_collection_get.md b/docs/cli/defradb_client_collection_get.md index 675988c487..3f60490272 100644 --- a/docs/cli/defradb_client_collection_get.md +++ b/docs/cli/defradb_client_collection_get.md @@ -11,7 +11,7 @@ Example: ``` -defradb client collection get [--show-deleted] [flags] +defradb client collection get [--show-deleted] <docID> [flags] ``` ### Options diff --git a/docs/cli/defradb_client_collection_update.md b/docs/cli/defradb_client_collection_update.md index c081614cce..4ba111f025 100644 --- a/docs/cli/defradb_client_collection_update.md +++ b/docs/cli/defradb_client_collection_update.md @@ -1,33 +1,33 @@ ## defradb client collection update -Update documents by key or filter. +Update documents by docID or filter. ### Synopsis -Update documents by key or filter. +Update documents by docID or filter.
Example: update from string - defradb client collection update --name User --key bae-123 '{ "name": "Bob" }' + defradb client collection update --name User --docID bae-123 '{ "name": "Bob" }' Example: update by filter defradb client collection update --name User \ --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' -Example: update by keys +Example: update by docIDs defradb client collection update --name User \ - --key bae-123,bae-456 --updater '{ "verified": true }' + --docID bae-123,bae-456 --updater '{ "verified": true }' ``` -defradb client collection update [--filter --key --updater ] [flags] +defradb client collection update [--filter <filter> --docID <docID> --updater <updater>] [flags] ``` ### Options ``` + --docID strings Document ID --filter string Document filter -h, --help help for update - --key strings Document key --updater string Document updater ``` diff --git a/docs/cli/defradb_client_document.md b/docs/cli/defradb_client_document.md deleted file mode 100644 index bc527357e7..0000000000 --- a/docs/cli/defradb_client_document.md +++ /dev/null @@ -1,38 +0,0 @@ -## defradb client document - -Create, read, update, and delete documents. - -### Synopsis - -Create, read, update, and delete documents. - -### Options - -``` - -h, --help help for document -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client](defradb_client.md) - Interact with a DefraDB node -* [defradb client document create](defradb_client_document_create.md) - Create a new document. -* [defradb client document delete](defradb_client_document_delete.md) - Delete documents by key or filter. -* [defradb client document get](defradb_client_document_get.md) - View detailed document info. -* [defradb client document keys](defradb_client_document_keys.md) - List all collection document keys. -* [defradb client document save](defradb_client_document_save.md) - Create or update a document. -* [defradb client document update](defradb_client_document_update.md) - Update documents by key or filter. - diff --git a/docs/cli/defradb_client_document_create.md b/docs/cli/defradb_client_document_create.md deleted file mode 100644 index 99dbd0d7f5..0000000000 --- a/docs/cli/defradb_client_document_create.md +++ /dev/null @@ -1,44 +0,0 @@ -## defradb client document create - -Create a new document. - -### Synopsis - -Create a new document. - -Example: create document - defradb client document create --collection User '{ "name": "Bob" }' - -Example: create documents - defradb client document create --collection User '[{ "name": "Alice" }, { "name": "Bob" }]' - - -``` -defradb client document create --collection [flags] -``` - -### Options - -``` - -c, --collection string Collection name - -h, --help help for create -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use.
Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_document_delete.md b/docs/cli/defradb_client_document_delete.md deleted file mode 100644 index 96a0b1e973..0000000000 --- a/docs/cli/defradb_client_document_delete.md +++ /dev/null @@ -1,46 +0,0 @@ -## defradb client document delete - -Delete documents by key or filter. - -### Synopsis - -Delete documents by key or filter and lists the number of documents deleted. - -Example: delete by key(s) - defradb client document delete --collection User --key bae-123,bae-456 - -Example: delete by filter - defradb client document delete --collection User --filter '{ "_gte": { "points": 100 } }' - - -``` -defradb client document delete --collection [--filter --key ] [flags] -``` - -### Options - -``` - -c, --collection string Collection name - --filter string Document filter - -h, --help help for delete - --key strings Document key -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_document_save.md b/docs/cli/defradb_client_document_save.md deleted file mode 100644 index 41f59a860c..0000000000 --- a/docs/cli/defradb_client_document_save.md +++ /dev/null @@ -1,42 +0,0 @@ -## defradb client document save - -Create or update a document. - -### Synopsis - -Create or update a document. - -Example: - defradb client document save --collection User --key bae-123 '{ "name": "Bob" }' - - -``` -defradb client document save --collection --key [flags] -``` - -### Options - -``` - -c, --collection string Collection name - -h, --help help for save - --key string Document key -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. 
Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_document_update.md b/docs/cli/defradb_client_document_update.md deleted file mode 100644 index 3efc67ebf0..0000000000 --- a/docs/cli/defradb_client_document_update.md +++ /dev/null @@ -1,52 +0,0 @@ -## defradb client document update - -Update documents by key or filter. - -### Synopsis - -Update documents by key or filter. - -Example: - defradb client document update --collection User --key bae-123 '{ "name": "Bob" }' - -Example: update by filter - defradb client document update --collection User \ - --filter '{ "_gte": { "points": 100 } }' --updater '{ "verified": true }' - -Example: update by keys - defradb client document update --collection User \ - --key bae-123,bae-456 --updater '{ "verified": true }' - - -``` -defradb client document update --collection [--filter --key --updater ] [flags] -``` - -### Options - -``` - -c, --collection string Collection name - --filter string Document filter - -h, --help help for update - --key strings Document key - --updater string Document updater -``` - -### Options inherited from parent commands - -``` - --logformat string Log format to use. Options are csv, json (default "csv") - --logger stringArray Override logger parameters. Usage: --logger ,level=,output=,... - --loglevel string Log level to use. Options are debug, info, error, fatal (default "info") - --lognocolor Disable colored log output - --logoutput string Log output path (default "stderr") - --logtrace Include stacktrace in error and fatal logs - --rootdir string Directory for data and configuration to use (default: $HOME/.defradb) - --tx uint Transaction ID - --url string URL of HTTP endpoint to listen on or connect to (default "localhost:9181") -``` - -### SEE ALSO - -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. - diff --git a/docs/cli/defradb_client_index_create.md b/docs/cli/defradb_client_index_create.md index 96b6418440..e434cb91cd 100644 --- a/docs/cli/defradb_client_index_create.md +++ b/docs/cli/defradb_client_index_create.md @@ -7,6 +7,7 @@ Creates a secondary index on a collection's field(s) Creates a secondary index on a collection's field(s). The --name flag is optional. If not provided, a name will be generated automatically. +The --unique flag is optional. If provided, the index will be unique. 
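+ +Example: create a unique index for 'Users' collection on 'name' field: + defradb client index create --collection Users --fields name --unique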
Example: create an index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name @@ -15,7 +16,7 @@ Example: create a named index for 'Users' collection on 'name' field: defradb client index create --collection Users --fields name --name UsersByName ``` -defradb client index create -c --collection --fields [-n --name ] [flags] +defradb client index create -c --collection <collection> --fields <fields> [-n --name <name>] [--unique] [flags] ``` ### Options @@ -25,6 +26,7 @@ defradb client index create -c --collection --fields [-n - --fields strings Fields to index -h, --help help for create -n, --name string Index name + -u, --unique Make the index unique ``` ### Options inherited from parent commands diff --git a/docs/cli/defradb_client_document_get.md b/docs/cli/defradb_client_view.md similarity index 63% rename from docs/cli/defradb_client_document_get.md rename to docs/cli/defradb_client_view.md index 600712ec0b..c3aaf4a69f 100644 --- a/docs/cli/defradb_client_document_get.md +++ b/docs/cli/defradb_client_view.md @@ -1,25 +1,15 @@ -## defradb client document get +## defradb client view -View detailed document info. +Manage views within a running DefraDB instance ### Synopsis -View detailed document info. - -Example: - defradb client document get --collection User bae-123 - - -``` -defradb client document get --collection [--show-deleted] [flags] -``` +Manage (add) views within a running DefraDB instance ### Options ``` - -c, --collection string Collection name - -h, --help help for get - --show-deleted Show deleted documents + -h, --help help for view ``` ### Options inherited from parent commands @@ -38,5 +28,6 @@ defradb client document get --collection [--show-deleted] ### SEE ALSO -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents. +* [defradb client](defradb_client.md) - Interact with a DefraDB node +* [defradb client view add](defradb_client_view_add.md) - Add new view diff --git a/docs/cli/defradb_client_document_keys.md b/docs/cli/defradb_client_view_add.md similarity index 66% rename from docs/cli/defradb_client_document_keys.md rename to docs/cli/defradb_client_view_add.md index e436f4df6b..caac7d862a 100644 --- a/docs/cli/defradb_client_document_keys.md +++ b/docs/cli/defradb_client_view_add.md @@ -1,24 +1,24 @@ -## defradb client document keys +## defradb client view add -List all collection document keys. +Add new view ### Synopsis -List all collection document keys. - -Example: - defradb client document keys --collection User keys - +Add new database view. + +Example: add from an argument string: + defradb client view add 'Foo { name, ...}' 'type Foo { ... }' + +Learn more about the DefraDB GraphQL Schema Language at https://docs.source.network. ``` -defradb client document keys --collection [flags] +defradb client view add [query] [sdl] [flags] ``` ### Options ``` - -c, --collection string Collection name - -h, --help help for keys + -h, --help help for add ``` ### Options inherited from parent commands @@ -37,5 +37,5 @@ defradb client document keys --collection [flags] ### SEE ALSO -* [defradb client document](defradb_client_document.md) - Create, read, update, and delete documents.
+* [defradb client view](defradb_client_view.md) - Manage views within a running DefraDB instance diff --git a/docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md b/docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md new file mode 100644 index 0000000000..bf6a541977 --- /dev/null +++ b/docs/data_format_changes/i1749-rename-key-to-doc-id-terminology.md @@ -0,0 +1,7 @@ +# Rename _key to _docID + +The older `_key` terminology has been renamed to `_docID`; the update is applied to all documentation and throughout the codebase. + +- All instances of `(k|K)ey(s|S)` and `(d|D)ockey(s|S)` should have been updated to use the term `(d|D)ocID(s)` instead. + +- Usage of the id/ids arguments has also been updated to docID/docIDs diff --git a/events/db_update.go index d9479656a3..a6865b8707 100644 --- a/events/db_update.go +++ b/events/db_update.go @@ -25,7 +25,7 @@ var EmptyUpdateChannel = immutable.None[Channel[Update]]() // UpdateEvent represents a new DAG node added to the append-only MerkleCRDT Clock graph // of a document or sub-field. type Update struct { - DocKey string + DocID string Cid cid.Cid SchemaRoot string Block ipld.Node diff --git a/examples/request/user_creation.graphql index 915285bb10..0cab4c6d45 100644 --- a/examples/request/user_creation.graphql +++ b/examples/request/user_creation.graphql @@ -1,5 +1,5 @@ mutation { create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { - _key + _docID } -} \ No newline at end of file +} diff --git a/examples/request/user_query.graphql index ffdc86b259..5d38670c23 100644 --- a/examples/request/user_query.graphql +++ b/examples/request/user_query.graphql @@ -4,6 +4,6 @@ query { age verified points - _key + _docID } -} \ No newline at end of file +} diff --git a/http/client_collection.go index 35ca21ce4f..36b99cd9f2 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -62,9 +62,9 @@ func (c *Collection) Definition() client.CollectionDefinition { func (c *Collection) Create(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name) - // We must call this here, else the doc key on the given object will not match + // We must call this here, else the docID on the given object will not match // that of the document saved in the database - err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields) + err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) if err != nil { return err } @@ -90,9 +90,9 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er var docMapList []json.RawMessage for _, doc := range docs { - // We must call this here, else the doc key on the given object will not match + // We must call this here, else the docID on the given object will not match // that of the document saved in the database - err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields) + err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) if err != nil { return err } @@ -122,7 +122,7 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er } func (c *Collection) Update(ctx context.Context, doc *client.Document) error { - methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, doc.Key().String()) + methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, doc.ID().String()) body, err := doc.ToJSONPatch() if err
!= nil { @@ -141,7 +141,7 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { } func (c *Collection) Save(ctx context.Context, doc *client.Document) error { - _, err := c.Get(ctx, doc.Key(), true) + _, err := c.Get(ctx, doc.ID(), true) if err == nil { return c.Update(ctx, doc) } @@ -151,8 +151,8 @@ func (c *Collection) Save(ctx context.Context, doc *client.Document) error { return err } -func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { - methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docKey.String()) +func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { + methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docID.String()) req, err := http.NewRequestWithContext(ctx, http.MethodDelete, methodURL.String(), nil) if err != nil { @@ -165,8 +165,8 @@ func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, er return true, nil } -func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { - _, err := c.Get(ctx, docKey, false) +func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { + _, err := c.Get(ctx, docID, false) if err != nil { return false, err } @@ -177,10 +177,10 @@ func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) - case client.DocKey: - return c.UpdateWithKey(ctx, t, updater) - case []client.DocKey: - return c.UpdateWithKeys(ctx, t, updater) + case client.DocID: + return c.UpdateWithDocID(ctx, t, updater) + case []client.DocID: + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -218,28 +218,28 @@ func (c *Collection) UpdateWithFilter( }) } -func (c *Collection) UpdateWithKey( +func (c *Collection) UpdateWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { return c.updateWith(ctx, CollectionUpdateRequest{ - Key: key.String(), + DocID: docID.String(), Updater: updater, }) } -func (c *Collection) UpdateWithKeys( +func (c *Collection) UpdateWithDocIDs( ctx context.Context, - docKeys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { - var keys []string - for _, key := range docKeys { - keys = append(keys, key.String()) + var strDocIDs []string + for _, docID := range docIDs { + strDocIDs = append(strDocIDs, docID.String()) } return c.updateWith(ctx, CollectionUpdateRequest{ - Keys: keys, + DocIDs: strDocIDs, Updater: updater, }) } @@ -248,10 +248,10 @@ func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.Delete switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) - case client.DocKey: - return c.DeleteWithKey(ctx, t) - case []client.DocKey: - return c.DeleteWithKeys(ctx, t) + case client.DocID: + return c.DeleteWithDocID(ctx, t) + case []client.DocID: + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -284,29 +284,29 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. 
}) } -func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { return c.deleteWith(ctx, CollectionDeleteRequest{ - Key: docKey.String(), + DocID: docID.String(), }) } -func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { - var keys []string - for _, key := range docKeys { - keys = append(keys, key.String()) +func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { + var strDocIDs []string + for _, docID := range docIDs { + strDocIDs = append(strDocIDs, docID.String()) } return c.deleteWith(ctx, CollectionDeleteRequest{ - Keys: keys, + DocIDs: strDocIDs, }) } -func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { query := url.Values{} if showDeleted { query.Add("show_deleted", "true") } - methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, key.String()) + methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name, docID.String()) methodURL.RawQuery = query.Encode() req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -332,7 +332,7 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } } -func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name) req, err := http.NewRequestWithContext(ctx, http.MethodGet, methodURL.String(), nil) @@ -345,7 +345,7 @@ func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysRe if err != nil { return nil, err } - docKeyCh := make(chan client.DocKeysResult) + docIDCh := make(chan client.DocIDResult) go func() { eventReader := sse.NewReadCloser(res.Body) // ignore close errors because the status // and body of the request are already // checked and it cannot be handled properly defer eventReader.Close() //nolint:errcheck - defer close(docKeyCh) + defer close(docIDCh) for { evt, err := eventReader.Next() if err != nil { return } - var res DocKeyResult + var res DocIDResult if err := json.Unmarshal(evt.Data, &res); err != nil { return } - key, err := client.NewDocKeyFromString(res.Key) + docID, err := client.NewDocIDFromString(res.DocID) if err != nil { return } - docKey := client.DocKeysResult{ - Key: key, + docIDResult := client.DocIDResult{ + ID: docID, } if res.Error != "" { - docKey.Err = fmt.Errorf(res.Error) + docIDResult.Err = fmt.Errorf(res.Error) } - docKeyCh <- docKey + docIDCh <- docIDResult } }() - return docKeyCh, nil + return docIDCh, nil } func (c *Collection) CreateIndex( diff --git a/http/errors.go index dae6a2d863..b78771723f 100644 --- a/http/errors.go +++ b/http/errors.go @@ -26,18 +26,8 @@ const ( // Errors returned from this package may be tested against these errors with errors.Is.
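// For example, a caller might check: if errors.Is(err, ErrInvalidRequestBody) { /* handle a malformed request body */ }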
var ( ErrNoListener = errors.New("cannot serve with no listener") - ErrSchema = errors.New("base must start with the http or https scheme") - ErrDatabaseNotAvailable = errors.New("no database available") - ErrFormNotSupported = errors.New("content type application/x-www-form-urlencoded not yet supported") - ErrBodyEmpty = errors.New("body cannot be empty") - ErrMissingGQLRequest = errors.New("missing GraphQL request") - ErrPeerIdUnavailable = errors.New("no PeerID available. P2P might be disabled") - ErrStreamingUnsupported = errors.New("streaming unsupported") ErrNoEmail = errors.New("email address must be specified for tls with autocert") - ErrPayloadFormat = errors.New("invalid payload format") - ErrMissingNewKey = errors.New("missing _newKey for imported doc") ErrInvalidRequestBody = errors.New("invalid request body") - ErrDocKeyDoesNotMatch = errors.New("document key does not match") ErrStreamingNotSupported = errors.New("streaming not supported") ErrMigrationNotFound = errors.New("migration not found") ErrMissingRequest = errors.New("missing request") diff --git a/http/handler_collection.go b/http/handler_collection.go index 69f08d7073..87a47e1ad2 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -26,14 +26,14 @@ import ( type collectionHandler struct{} type CollectionDeleteRequest struct { - Key string `json:"key"` - Keys []string `json:"keys"` + DocID string `json:"docID"` + DocIDs []string `json:"docIDs"` Filter any `json:"filter"` } type CollectionUpdateRequest struct { - Key string `json:"key"` - Keys []string `json:"keys"` + DocID string `json:"docID"` + DocIDs []string `json:"docIDs"` Filter any `json:"filter"` Updater string `json:"updater"` } @@ -101,29 +101,29 @@ func (s *collectionHandler) DeleteWith(rw http.ResponseWriter, req *http.Request return } responseJSON(rw, http.StatusOK, result) - case request.Key != "": - docKey, err := client.NewDocKeyFromString(request.Key) + case request.DocID != "": + docID, err := client.NewDocIDFromString(request.DocID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.DeleteWith(req.Context(), docKey) + result, err := col.DeleteWith(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) - case request.Keys != nil: - var docKeys []client.DocKey - for _, key := range request.Keys { - docKey, err := client.NewDocKeyFromString(key) + case request.DocIDs != nil: + var docIDs []client.DocID + for _, docIDStr := range request.DocIDs { + docID, err := client.NewDocIDFromString(docIDStr) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - docKeys = append(docKeys, docKey) + docIDs = append(docIDs, docID) } - result, err := col.DeleteWith(req.Context(), docKeys) + result, err := col.DeleteWith(req.Context(), docIDs) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -151,29 +151,29 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request return } responseJSON(rw, http.StatusOK, result) - case request.Key != "": - docKey, err := client.NewDocKeyFromString(request.Key) + case request.DocID != "": + docID, err := client.NewDocIDFromString(request.DocID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - result, err := col.UpdateWith(req.Context(), docKey, request.Updater) + result, err := col.UpdateWith(req.Context(), docID, request.Updater) if err 
!= nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } responseJSON(rw, http.StatusOK, result) - case request.Keys != nil: - var docKeys []client.DocKey - for _, key := range request.Keys { - docKey, err := client.NewDocKeyFromString(key) + case request.DocIDs != nil: + var docIDs []client.DocID + for _, docIDStr := range request.DocIDs { + docID, err := client.NewDocIDFromString(docIDStr) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - docKeys = append(docKeys, docKey) + docIDs = append(docIDs, docID) } - result, err := col.UpdateWith(req.Context(), docKeys, request.Updater) + result, err := col.UpdateWith(req.Context(), docIDs, request.Updater) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -187,12 +187,12 @@ func (s *collectionHandler) UpdateWith(rw http.ResponseWriter, req *http.Request func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) - docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - doc, err := col.Get(req.Context(), docKey, true) + doc, err := col.Get(req.Context(), docID, true) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -217,12 +217,12 @@ func (s *collectionHandler) Update(rw http.ResponseWriter, req *http.Request) { func (s *collectionHandler) Delete(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) - docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - _, err = col.Delete(req.Context(), docKey) + _, err = col.Delete(req.Context(), docID) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -234,12 +234,12 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) showDeleted, _ := strconv.ParseBool(req.URL.Query().Get("show_deleted")) - docKey, err := client.NewDocKeyFromString(chi.URLParam(req, "key")) + docID, err := client.NewDocIDFromString(chi.URLParam(req, "docID")) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - doc, err := col.Get(req.Context(), docKey, showDeleted) + doc, err := col.Get(req.Context(), docID, showDeleted) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -252,12 +252,12 @@ func (s *collectionHandler) Get(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusOK, docMap) } -type DocKeyResult struct { - Key string `json:"key"` +type DocIDResult struct { + DocID string `json:"docID"` Error string `json:"error"` } -func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Request) { +func (s *collectionHandler) GetAllDocIDs(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) flusher, ok := rw.(http.Flusher) @@ -266,7 +266,7 @@ func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Requ return } - docKeyCh, err := col.GetAllDocKeys(req.Context()) + docIDsResult, err := col.GetAllDocIDs(req.Context()) if err != nil { responseJSON(rw, 
http.StatusBadRequest, errorResponse{err}) return @@ -279,12 +279,12 @@ func (s *collectionHandler) GetAllDocKeys(rw http.ResponseWriter, req *http.Requ rw.WriteHeader(http.StatusOK) flusher.Flush() - for docKey := range docKeyCh { - results := &DocKeyResult{ - Key: docKey.Key.String(), + for docID := range docIDsResult { + results := &DocIDResult{ + DocID: docID.ID.String(), } - if docKey.Err != nil { - results.Error = docKey.Err.Error() + if docID.Err != nil { + results.Error = docID.Err.Error() } data, err := json.Marshal(results) if err != nil { @@ -478,7 +478,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { dropIndex.Responses["200"] = successResponse dropIndex.Responses["400"] = errorResponse - documentKeyPathParam := openapi3.NewPathParameter("key"). + documentIDPathParam := openapi3.NewPathParameter("docID"). WithRequired(true). WithSchema(openapi3.NewStringSchema()) @@ -487,51 +487,51 @@ func (h *collectionHandler) bindRoutes(router *Router) { WithJSONSchemaRef(documentSchema) collectionGet := openapi3.NewOperation() - collectionGet.Description = "Get a document by key" + collectionGet.Description = "Get a document by docID" collectionGet.OperationID = "collection_get" collectionGet.Tags = []string{"collection"} collectionGet.AddParameter(collectionNamePathParam) - collectionGet.AddParameter(documentKeyPathParam) + collectionGet.AddParameter(documentIDPathParam) collectionGet.AddResponse(200, collectionGetResponse) collectionGet.Responses["400"] = errorResponse collectionUpdate := openapi3.NewOperation() - collectionUpdate.Description = "Update a document by key" + collectionUpdate.Description = "Update a document by docID" collectionUpdate.OperationID = "collection_update" collectionUpdate.Tags = []string{"collection"} collectionUpdate.AddParameter(collectionNamePathParam) - collectionUpdate.AddParameter(documentKeyPathParam) + collectionUpdate.AddParameter(documentIDPathParam) collectionUpdate.Responses = make(openapi3.Responses) collectionUpdate.Responses["200"] = successResponse collectionUpdate.Responses["400"] = errorResponse collectionDelete := openapi3.NewOperation() - collectionDelete.Description = "Delete a document by key" + collectionDelete.Description = "Delete a document by docID" collectionDelete.OperationID = "collection_delete" collectionDelete.Tags = []string{"collection"} collectionDelete.AddParameter(collectionNamePathParam) - collectionDelete.AddParameter(documentKeyPathParam) + collectionDelete.AddParameter(documentIDPathParam) collectionDelete.Responses = make(openapi3.Responses) collectionDelete.Responses["200"] = successResponse collectionDelete.Responses["400"] = errorResponse collectionKeys := openapi3.NewOperation() collectionKeys.AddParameter(collectionNamePathParam) - collectionKeys.Description = "Get all document keys" + collectionKeys.Description = "Get all document IDs" collectionKeys.OperationID = "collection_keys" collectionKeys.Tags = []string{"collection"} collectionKeys.Responses = make(openapi3.Responses) collectionKeys.Responses["200"] = successResponse collectionKeys.Responses["400"] = errorResponse - router.AddRoute("/collections/{name}", http.MethodGet, collectionKeys, h.GetAllDocKeys) + router.AddRoute("/collections/{name}", http.MethodGet, collectionKeys, h.GetAllDocIDs) router.AddRoute("/collections/{name}", http.MethodPost, collectionCreate, h.Create) router.AddRoute("/collections/{name}", http.MethodPatch, collectionUpdateWith, h.UpdateWith) router.AddRoute("/collections/{name}", http.MethodDelete, collectionDeleteWith, 
h.DeleteWith) router.AddRoute("/collections/{name}/indexes", http.MethodPost, createIndex, h.CreateIndex) router.AddRoute("/collections/{name}/indexes", http.MethodGet, getIndexes, h.GetIndexes) router.AddRoute("/collections/{name}/indexes/{index}", http.MethodDelete, dropIndex, h.DropIndex) - router.AddRoute("/collections/{name}/{key}", http.MethodGet, collectionGet, h.Get) - router.AddRoute("/collections/{name}/{key}", http.MethodPatch, collectionUpdate, h.Update) - router.AddRoute("/collections/{name}/{key}", http.MethodDelete, collectionDelete, h.Delete) + router.AddRoute("/collections/{name}/{docID}", http.MethodGet, collectionGet, h.Get) + router.AddRoute("/collections/{name}/{docID}", http.MethodPatch, collectionUpdate, h.Update) + router.AddRoute("/collections/{name}/{docID}", http.MethodDelete, collectionDelete, h.Delete) } diff --git a/lens/fetcher.go index 9186adbb7c..71f5b6243a 100644 --- a/lens/fetcher.go +++ b/lens/fetcher.go @@ -192,7 +192,7 @@ func encodedDocToLensDoc(doc fetcher.EncodedDocument) (LensDoc, error) { for field, fieldValue := range properties { docAsMap[field.Name] = fieldValue } - docAsMap[request.KeyFieldName] = string(doc.Key()) + docAsMap[request.DocIDFieldName] = string(doc.ID()) // Note: client.Document does not have a means of flagging whether it is // deleted or not, and currently the fetcher does not ever return deleted items @@ -207,7 +207,7 @@ func (f *lensedFetcher) lensDocToEncodedDoc(docAsMap LensDoc) (fetcher.EncodedDo properties := map[client.FieldDescription]any{} for fieldName, fieldByteValue := range docAsMap { - if fieldName == request.KeyFieldName { + if fieldName == request.DocIDFieldName { key = fieldByteValue.(string) continue } @@ -277,14 +277,14 @@ func (f *lensedFetcher) updateDataStore(ctx context.Context, original map[string } } - dockey, ok := original[request.KeyFieldName].(string) + docID, ok := original[request.DocIDFieldName].(string) if !ok { return core.ErrInvalidKey } datastoreKeyBase := core.DataStoreKey{ CollectionID: f.col.Description().IDString(), - DocKey: dockey, + DocID: docID, InstanceType: core.ValueKey, } @@ -326,7 +326,7 @@ type lensEncodedDocument struct { var _ fetcher.EncodedDocument = (*lensEncodedDocument)(nil) -func (encdoc *lensEncodedDocument) Key() []byte { +func (encdoc *lensEncodedDocument) ID() []byte { return encdoc.key } diff --git a/merkle/clock/clock_test.go index a804165062..311d990952 100644 --- a/merkle/clock/clock_test.go +++ b/merkle/clock/clock_test.go @@ -17,6 +17,7 @@ import ( cid "github.com/ipfs/go-cid" ds "github.com/ipfs/go-datastore" + "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" ccid "github.com/sourcenetwork/defradb/core/cid" "github.com/sourcenetwork/defradb/core/crdt" @@ -32,7 +33,12 @@ func newTestMerkleClock() *MerkleClock { multistore := datastore.MultiStoreFrom(s) reg := crdt.NewLWWRegister(multistore.Rootstore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "") - return NewMerkleClock(multistore.Headstore(), multistore.DAGstore(), core.HeadStoreKey{DocKey: "dockey", FieldId: "1"}, reg).(*MerkleClock) + return NewMerkleClock( + multistore.Headstore(), + multistore.DAGstore(), + core.HeadStoreKey{DocID: request.DocIDArgName, FieldId: "1"}, + reg, + ).(*MerkleClock) } func TestNewMerkleClock(t *testing.T) { diff --git a/merkle/clock/heads_test.go index a857571515..18db117ebb 100644 --- a/merkle/clock/heads_test.go +++
b/merkle/clock/heads_test.go @@ -45,7 +45,7 @@ func newHeadSet() *heads { return NewHeadSet( datastore.AsDSReaderWriter(s), - core.HeadStoreKey{}.WithDocKey("mydockey").WithFieldId("1"), + core.HeadStoreKey{}.WithDocID("myDocID").WithFieldId("1"), ) } diff --git a/net/client.go b/net/client.go index 947495c5e1..20c33e33fd 100644 --- a/net/client.go +++ b/net/client.go @@ -36,12 +36,12 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er log.Debug( ctx, "Preparing pushLog request", - logging.NewKV("DocKey", evt.DocKey), + logging.NewKV("DocID", evt.DocID), logging.NewKV("CID", evt.Cid), logging.NewKV("SchemaRoot", evt.SchemaRoot)) body := &pb.PushLogRequest_Body{ - DocKey: []byte(evt.DocKey), + DocID: []byte(evt.DocID), Cid: evt.Cid.Bytes(), SchemaRoot: []byte(evt.SchemaRoot), Creator: s.peer.host.ID().String(), @@ -55,7 +55,7 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er log.Debug( ctx, "Pushing log", - logging.NewKV("DocKey", evt.DocKey), + logging.NewKV("DocID", evt.DocID), logging.NewKV("CID", evt.Cid), logging.NewKV("PeerID", pid), ) @@ -72,7 +72,7 @@ func (s *server) pushLog(ctx context.Context, evt events.Update, pid peer.ID) er return NewErrPushLog( err, errors.NewKV("CID", evt.Cid), - errors.NewKV("DocKey", evt.DocKey), + errors.NewKV("DocID", evt.DocID), errors.NewKV("PeerID", pid), ) } diff --git a/net/client_test.go b/net/client_test.go index df07e00c34..7eba460b95 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -40,7 +40,7 @@ func TestPushlogWithDialFailure(t *testing.T) { ) err = n.server.pushLog(ctx, events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: cid, SchemaRoot: "test", Block: &EmptyNode{}, @@ -61,7 +61,7 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { require.NoError(t, err) err = n.server.pushLog(ctx, events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: cid, SchemaRoot: "test", Block: &EmptyNode{}, @@ -109,7 +109,7 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { require.NoError(t, err) err = n1.server.pushLog(ctx, events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: cid, SchemaRoot: col.SchemaRoot(), Block: &EmptyNode{}, diff --git a/net/dag.go b/net/dag.go index 1760864db4..f083904915 100644 --- a/net/dag.go +++ b/net/dag.go @@ -77,20 +77,20 @@ func (p *Peer) sendJobWorker() { return case newJob := <-p.sendJobs: - jobs, ok := docWorkerQueue[newJob.bp.dsKey.DocKey] + jobs, ok := docWorkerQueue[newJob.bp.dsKey.DocID] if !ok { jobs = make(chan *dagJob, numWorkers) for i := 0; i < numWorkers; i++ { go p.dagWorker(jobs) } - docWorkerQueue[newJob.bp.dsKey.DocKey] = jobs + docWorkerQueue[newJob.bp.dsKey.DocID] = jobs } jobs <- newJob - case dockey := <-p.closeJob: - if jobs, ok := docWorkerQueue[dockey]; ok { + case docID := <-p.closeJob: + if jobs, ok := docWorkerQueue[docID]; ok { close(jobs) - delete(docWorkerQueue, dockey) + delete(docWorkerQueue, docID) } } } diff --git a/net/dag_test.go b/net/dag_test.go index 6f0145b0ae..fc46b6a96c 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -62,7 +62,7 @@ func TestSendJobWorker_WithNewJob_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - dsKey := core.DataStoreKeyFromDocKey(doc.Key()) + dsKey := core.DataStoreKeyFromDocID(doc.ID()) txn, err := db.NewTxn(ctx, false) require.NoError(t, err) @@ -103,7 +103,7 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { doc, err := 
client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - dsKey := core.DataStoreKeyFromDocKey(doc.Key()) + dsKey := core.DataStoreKeyFromDocID(doc.ID()) txn, err := db.NewTxn(ctx, false) require.NoError(t, err) @@ -119,7 +119,7 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { }, } - n.closeJob <- dsKey.DocKey + n.closeJob <- dsKey.DocID n.Close() select { @@ -164,7 +164,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - dsKey := core.DataStoreKeyFromDocKey(doc.Key()) + dsKey := core.DataStoreKeyFromDocID(doc.ID()) err = col.Create(ctx, doc) require.NoError(t, err) diff --git a/net/doc.go b/net/doc.go index dd80ee53a4..57cd6cd7fc 100644 --- a/net/doc.go +++ b/net/doc.go @@ -13,7 +13,7 @@ /* Package net provides P2P network functions for the core DefraDB instance. -Notable design descision: all DocKeys (Documents) have their own respective PubSub topics. +Notable design decision: all DocIDs (Documents) have their own respective PubSub topics. The Peer object encapsulates an instanciated DB objects, libp2p host object, libp2p DAGService. Peer is responsible for storing all network related meta-data, maintaining open connections, pubsub mechanics, etc. diff --git a/net/errors.go b/net/errors.go index e9ac8fc748..1ca2d857d5 100644 --- a/net/errors.go +++ b/net/errors.go @@ -20,11 +20,11 @@ import ( const ( errPushLog = "failed to push log" - errFailedToGetDockey = "failed to get DocKey from broadcast message" - errPublishingToDockeyTopic = "can't publish log %s for dockey %s" + errFailedToGetDocID = "failed to get DocID from broadcast message" + errPublishingToDocIDTopic = "can't publish log %s for docID %s" errPublishingToSchemaTopic = "can't publish log %s for schema %s" errReplicatorExists = "replicator already exists for %s with peerID %s" - errReplicatorDocKey = "failed to get dockey for replicator %s with peerID %s" + errReplicatorDocID = "failed to get docID for replicator %s with peerID %s" errReplicatorCollections = "failed to get collections for replicator" ) @@ -41,24 +41,24 @@ func NewErrPushLog(inner error, kv ...errors.KV) error { return errors.Wrap(errPushLog, inner, kv...) } -func NewErrFailedToGetDockey(inner error, kv ...errors.KV) error { - return errors.Wrap(errFailedToGetDockey, inner, kv...) +func NewErrFailedToGetDocID(inner error, kv ...errors.KV) error { + return errors.Wrap(errFailedToGetDocID, inner, kv...) } -func NewErrPublishingToDockeyTopic(inner error, cid, key string, kv ...errors.KV) error { - return errors.Wrap(fmt.Sprintf(errPublishingToDockeyTopic, cid, key), inner, kv...) +func NewErrPublishingToDocIDTopic(inner error, cid, docID string, kv ...errors.KV) error { + return errors.Wrap(fmt.Sprintf(errPublishingToDocIDTopic, cid, docID), inner, kv...) } -func NewErrPublishingToSchemaTopic(inner error, cid, key string, kv ...errors.KV) error { - return errors.Wrap(fmt.Sprintf(errPublishingToSchemaTopic, cid, key), inner, kv...) +func NewErrPublishingToSchemaTopic(inner error, cid, docID string, kv ...errors.KV) error { + return errors.Wrap(fmt.Sprintf(errPublishingToSchemaTopic, cid, docID), inner, kv...) } func NewErrReplicatorExists(collection string, peerID peer.ID, kv ...errors.KV) error { return errors.New(fmt.Sprintf(errReplicatorExists, collection, peerID), kv...)
} -func NewErrReplicatorDocKey(inner error, collection string, peerID peer.ID, kv ...errors.KV) error { - return errors.Wrap(fmt.Sprintf(errReplicatorDocKey, collection, peerID), inner, kv...) +func NewErrReplicatorDocID(inner error, collection string, peerID peer.ID, kv ...errors.KV) error { + return errors.Wrap(fmt.Sprintf(errReplicatorDocID, collection, peerID), inner, kv...) } func NewErrReplicatorCollections(inner error, kv ...errors.KV) error { diff --git a/net/pb/net.pb.go b/net/pb/net.pb.go index 92eaafa5be..a9b5a2162d 100644 --- a/net/pb/net.pb.go +++ b/net/pb/net.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.31.0 -// protoc v3.12.4 +// protoc v4.25.1 // source: net.proto package net_pb @@ -27,7 +27,7 @@ type Document struct { unknownFields protoimpl.UnknownFields // ID of the document. - DocKey []byte `protobuf:"bytes,1,opt,name=docKey,proto3" json:"docKey,omitempty"` + DocID []byte `protobuf:"bytes,1,opt,name=docID,proto3" json:"docID,omitempty"` // head of the log. Head []byte `protobuf:"bytes,4,opt,name=head,proto3" json:"head,omitempty"` } @@ -64,9 +64,9 @@ func (*Document) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{0} } -func (x *Document) GetDocKey() []byte { +func (x *Document) GetDocID() []byte { if x != nil { - return x.DocKey + return x.DocID } return nil } @@ -521,8 +521,8 @@ type PushLogRequest_Body struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - // docKey is the DocKey of the document that is affected by the log. - DocKey []byte `protobuf:"bytes,1,opt,name=docKey,proto3" json:"docKey,omitempty"` + // docID is the ID of the document that is affected by the log. + DocID []byte `protobuf:"bytes,1,opt,name=docID,proto3" json:"docID,omitempty"` // cid is the CID of the composite of the document. Cid []byte `protobuf:"bytes,2,opt,name=cid,proto3" json:"cid,omitempty"` // schemaRoot is the SchemaRoot of the collection that the document resides in. 
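Note on wire compatibility: protobuf serializes field numbers, not field names, and `docID` keeps field number 1, so messages produced after this rename should be byte-identical to pre-rename ones. A minimal sketch illustrating this (the import alias and the example value are hypothetical, not part of the patch):

```go
package main

import (
	"fmt"

	"google.golang.org/protobuf/proto"

	pb "github.com/sourcenetwork/defradb/net/pb"
)

func main() {
	// DocID still serializes under field number 1 (wire type 2), so these
	// bytes match what the old DocKey field produced for the same value.
	// "bae-example" is a placeholder, not a real document ID.
	body := &pb.PushLogRequest_Body{DocID: []byte("bae-example")}

	data, err := proto.Marshal(body)
	if err != nil {
		panic(err)
	}
	// First byte is 0x0a: (field number 1 << 3) | wire type 2.
	fmt.Printf("% x\n", data)
}
```

Consistent with this, the generated `rawDesc` below changes only where the name string and the embedded message lengths appear; the field tags themselves are untouched.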
@@ -565,9 +565,9 @@ func (*PushLogRequest_Body) Descriptor() ([]byte, []int) { return file_net_proto_rawDescGZIP(), []int{7, 0} } -func (x *PushLogRequest_Body) GetDocKey() []byte { +func (x *PushLogRequest_Body) GetDocID() []byte { if x != nil { - return x.DocKey + return x.DocID } return nil } @@ -604,59 +604,59 @@ var File_net_proto protoreflect.FileDescriptor var file_net_proto_rawDesc = []byte{ 0x0a, 0x09, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x6e, 0x65, 0x74, - 0x2e, 0x70, 0x62, 0x22, 0x53, 0x0a, 0x08, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, - 0x16, 0x0a, 0x06, 0x64, 0x6f, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x06, 0x64, 0x6f, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, - 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x1a, 0x1b, 0x0a, 0x03, 0x4c, - 0x6f, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, - 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, - 0x0a, 0x10, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, - 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, - 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, - 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, - 0x0d, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xd6, - 0x01, 0x0a, 0x0e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x12, 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, - 0x64, 0x79, 0x1a, 0x92, 0x01, 0x0a, 0x04, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x64, - 0x6f, 0x63, 0x4b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x6f, 0x63, - 0x4b, 0x65, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x03, 0x63, 0x69, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, - 0x6f, 0x6f, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, - 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, - 0x26, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6e, - 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x4c, - 0x6f, 0x67, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, - 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, - 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, - 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, - 0xd1, 0x02, 0x0a, 0x07, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, - 0x65, 0x74, 
0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, - 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, + 0x2e, 0x70, 0x62, 0x22, 0x51, 0x0a, 0x08, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, + 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, + 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x61, 0x64, 0x18, 0x04, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x04, 0x68, 0x65, 0x61, 0x64, 0x1a, 0x1b, 0x0a, 0x03, 0x4c, 0x6f, 0x67, + 0x12, 0x14, 0x0a, 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, + 0x05, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x22, 0x14, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, + 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x12, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, - 0x22, 0x00, 0x12, 0x48, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, - 0x70, 0x68, 0x12, 0x1b, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, - 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, - 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, - 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, - 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, - 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, - 0x6c, 0x79, 0x22, 0x00, 0x12, 0x39, 0x0a, 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, - 0x16, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, - 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, - 0x42, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, - 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, - 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, - 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, - 0x79, 0x22, 0x00, 0x42, 0x0a, 0x5a, 0x08, 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x22, 0x15, 0x0a, 0x13, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x50, 0x75, 0x73, 0x68, 0x44, + 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x0f, 0x0a, 0x0d, + 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0d, 0x0a, + 0x0b, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0xd4, 0x01, 0x0a, + 0x0e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x2f, 0x0a, 0x04, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, + 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 
0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x2e, 0x42, 0x6f, 0x64, 0x79, 0x52, 0x04, 0x62, 0x6f, 0x64, 0x79, + 0x1a, 0x90, 0x01, 0x0a, 0x04, 0x42, 0x6f, 0x64, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x64, 0x6f, 0x63, + 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x64, 0x6f, 0x63, 0x49, 0x44, 0x12, + 0x10, 0x0a, 0x03, 0x63, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x63, 0x69, + 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x6f, 0x6f, + 0x74, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x63, 0x72, 0x65, 0x61, 0x74, 0x6f, 0x72, 0x12, 0x26, 0x0a, 0x03, 0x6c, + 0x6f, 0x67, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, + 0x62, 0x2e, 0x44, 0x6f, 0x63, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x03, + 0x6c, 0x6f, 0x67, 0x22, 0x13, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, + 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, + 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x11, 0x0a, 0x0f, 0x47, 0x65, 0x74, 0x48, + 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x32, 0xd1, 0x02, 0x0a, 0x07, + 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x45, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x44, 0x6f, + 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1a, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, + 0x47, 0x65, 0x74, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x44, + 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x48, + 0x0a, 0x0c, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, 0x68, 0x12, 0x1b, + 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, + 0x72, 0x61, 0x70, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x6e, 0x65, + 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x44, 0x6f, 0x63, 0x47, 0x72, 0x61, 0x70, + 0x68, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x06, 0x47, 0x65, 0x74, 0x4c, + 0x6f, 0x67, 0x12, 0x15, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, + 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x13, 0x2e, 0x6e, 0x65, 0x74, 0x2e, + 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, + 0x12, 0x39, 0x0a, 0x07, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x6e, 0x65, + 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x50, 0x75, 0x73, + 0x68, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x12, 0x42, 0x0a, 0x0a, 0x47, + 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x12, 0x19, 0x2e, 0x6e, 0x65, 0x74, 0x2e, + 0x70, 0x62, 0x2e, 0x47, 0x65, 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6e, 0x65, 0x74, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x65, + 0x74, 0x48, 0x65, 0x61, 0x64, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x70, 0x6c, 0x79, 0x22, 0x00, 0x42, + 0x0a, 0x5a, 0x08, 0x2f, 0x3b, 0x6e, 0x65, 0x74, 0x5f, 0x70, 0x62, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, 
} var ( diff --git a/net/pb/net.proto b/net/pb/net.proto index 45c38bb256..5b0ee35dfb 100644 --- a/net/pb/net.proto +++ b/net/pb/net.proto @@ -6,7 +6,7 @@ option go_package = "/;net_pb"; // Log represents a thread log. message Document { // ID of the document. - bytes docKey = 1; + bytes docID = 1; // head of the log. bytes head = 4; @@ -33,8 +33,8 @@ message PushLogRequest { Body body = 1; message Body { - // docKey is the DocKey of the document that is affected by the log. - bytes docKey = 1; + // docID is the ID of the document that is affected by the log. + bytes docID = 1; // cid is the CID of the composite of the document. bytes cid = 2; // schemaRoot is the SchemaRoot of the collection that the document resides in. diff --git a/net/pb/net_grpc.pb.go b/net/pb/net_grpc.pb.go index bad62cdad7..75ae790ab6 100644 --- a/net/pb/net_grpc.pb.go +++ b/net/pb/net_grpc.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. // versions: // - protoc-gen-go-grpc v1.3.0 -// - protoc v3.12.4 +// - protoc v4.25.1 // source: net.proto package net_pb diff --git a/net/pb/net_vtproto.pb.go b/net/pb/net_vtproto.pb.go index ae28bba13d..2bae8f83f3 100644 --- a/net/pb/net_vtproto.pb.go +++ b/net/pb/net_vtproto.pb.go @@ -95,10 +95,10 @@ func (m *Document) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i-- dAtA[i] = 0x22 } - if len(m.DocKey) > 0 { - i -= len(m.DocKey) - copy(dAtA[i:], m.DocKey) - i = encodeVarint(dAtA, i, uint64(len(m.DocKey))) + if len(m.DocID) > 0 { + i -= len(m.DocID) + copy(dAtA[i:], m.DocID) + i = encodeVarint(dAtA, i, uint64(len(m.DocID))) i-- dAtA[i] = 0xa } @@ -364,10 +364,10 @@ func (m *PushLogRequest_Body) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i-- dAtA[i] = 0x12 } - if len(m.DocKey) > 0 { - i -= len(m.DocKey) - copy(dAtA[i:], m.DocKey) - i = encodeVarint(dAtA, i, uint64(len(m.DocKey))) + if len(m.DocID) > 0 { + i -= len(m.DocID) + copy(dAtA[i:], m.DocID) + i = encodeVarint(dAtA, i, uint64(len(m.DocID))) i-- dAtA[i] = 0xa } @@ -547,7 +547,7 @@ func (m *Document) SizeVT() (n int) { } var l int _ = l - l = len(m.DocKey) + l = len(m.DocID) if l > 0 { n += 1 + l + sov(uint64(l)) } @@ -625,7 +625,7 @@ func (m *PushLogRequest_Body) SizeVT() (n int) { } var l int _ = l - l = len(m.DocKey) + l = len(m.DocID) if l > 0 { n += 1 + l + sov(uint64(l)) } @@ -815,7 +815,7 @@ func (m *Document) UnmarshalVT(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DocKey", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field DocID", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { @@ -842,9 +842,9 @@ func (m *Document) UnmarshalVT(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.DocKey = append(m.DocKey[:0], dAtA[iNdEx:postIndex]...) - if m.DocKey == nil { - m.DocKey = []byte{} + m.DocID = append(m.DocID[:0], dAtA[iNdEx:postIndex]...) + if m.DocID == nil { + m.DocID = []byte{} } iNdEx = postIndex case 4: @@ -1240,7 +1240,7 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field DocKey", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field DocID", wireType) } var byteLen int for shift := uint(0); ; shift += 7 { @@ -1267,9 +1267,9 @@ func (m *PushLogRequest_Body) UnmarshalVT(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.DocKey = append(m.DocKey[:0], dAtA[iNdEx:postIndex]...) 
- if m.DocKey == nil { - m.DocKey = []byte{} + m.DocID = append(m.DocID[:0], dAtA[iNdEx:postIndex]...) + if m.DocID == nil { + m.DocID = []byte{} } iNdEx = postIndex case 2: diff --git a/net/peer.go b/net/peer.go index 2e702584a8..acdba2e9c8 100644 --- a/net/peer.go +++ b/net/peer.go @@ -64,7 +64,7 @@ type Peer struct { p2pRPC *grpc.Server // rpc server over the P2P network // Used to close the dagWorker pool for a given document. - // The string represents a dockey. + // The string represents a docID. closeJob chan string sendJobs chan *dagJob @@ -266,7 +266,7 @@ func (p *Peer) handleBroadcastLoop() { // RegisterNewDocument registers a new document with the peer node. func (p *Peer) RegisterNewDocument( ctx context.Context, - dockey client.DocKey, + docID client.DocID, c cid.Cid, nd ipld.Node, schemaRoot string, @@ -274,23 +274,23 @@ func (p *Peer) RegisterNewDocument( log.Debug( p.ctx, "Registering a new document for our peer node", - logging.NewKV("DocKey", dockey.String()), + logging.NewKV("DocID", docID.String()), ) // register topic - if err := p.server.addPubSubTopic(dockey.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { + if err := p.server.addPubSubTopic(docID.String(), !p.server.hasPubSubTopic(schemaRoot)); err != nil { log.ErrorE( p.ctx, "Failed to create new pubsub topic", err, - logging.NewKV("DocKey", dockey.String()), + logging.NewKV("DocID", docID.String()), ) return err } // publish log body := &pb.PushLogRequest_Body{ - DocKey: []byte(dockey.String()), + DocID: []byte(docID.String()), Cid: c.Bytes(), SchemaRoot: []byte(schemaRoot), Creator: p.host.ID().String(), @@ -309,18 +309,18 @@ func (p *Peer) pushToReplicator( ctx context.Context, txn datastore.Txn, collection client.Collection, - keysCh <-chan client.DocKeysResult, + docIDsCh <-chan client.DocIDResult, pid peer.ID, ) { - for key := range keysCh { - if key.Err != nil { - log.ErrorE(ctx, "Key channel error", key.Err) + for docIDResult := range docIDsCh { + if docIDResult.Err != nil { + log.ErrorE(ctx, "Key channel error", docIDResult.Err) continue } - dockey := core.DataStoreKeyFromDocKey(key.Key) + docID := core.DataStoreKeyFromDocID(docIDResult.ID) headset := clock.NewHeadSet( txn.Headstore(), - dockey.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), + docID.WithFieldId(core.COMPOSITE_NAMESPACE).ToHeadStoreKey(), ) cids, priority, err := headset.List(ctx) if err != nil { @@ -328,7 +328,7 @@ func (p *Peer) pushToReplicator( ctx, "Failed to get heads", err, - logging.NewKV("DocKey", key.Key.String()), + logging.NewKV("DocID", docIDResult.ID.String()), logging.NewKV("PeerID", pid), logging.NewKV("Collection", collection.Name())) continue @@ -352,7 +352,7 @@ func (p *Peer) pushToReplicator( } evt := events.Update{ - DocKey: key.Key.String(), + DocID: docIDResult.ID.String(), Cid: c, SchemaRoot: collection.SchemaRoot(), Block: nd, @@ -420,14 +420,14 @@ func (p *Peer) loadP2PCollections(ctx context.Context) (map[string]struct{}, err } func (p *Peer) handleDocCreateLog(evt events.Update) error { - dockey, err := client.NewDocKeyFromString(evt.DocKey) + docID, err := client.NewDocIDFromString(evt.DocID) if err != nil { - return NewErrFailedToGetDockey(err) + return NewErrFailedToGetDocID(err) } // We need to register the document before pushing to the replicators if we want to // ensure that we have subscribed to the topic. 
- err = p.RegisterNewDocument(p.ctx, dockey, evt.Cid, evt.Block, evt.SchemaRoot) + err = p.RegisterNewDocument(p.ctx, docID, evt.Cid, evt.Block, evt.SchemaRoot) if err != nil { return err } @@ -438,19 +438,19 @@ func (p *Peer) handleDocCreateLog(evt events.Update) error { } func (p *Peer) handleDocUpdateLog(evt events.Update) error { - dockey, err := client.NewDocKeyFromString(evt.DocKey) + docID, err := client.NewDocIDFromString(evt.DocID) if err != nil { - return NewErrFailedToGetDockey(err) + return NewErrFailedToGetDocID(err) } log.Debug( p.ctx, "Preparing pubsub pushLog request from broadcast", - logging.NewKV("DocKey", dockey), + logging.NewKV("DocID", docID), logging.NewKV("CID", evt.Cid), logging.NewKV("SchemaRoot", evt.SchemaRoot)) body := &pb.PushLogRequest_Body{ - DocKey: []byte(dockey.String()), + DocID: []byte(docID.String()), Cid: evt.Cid.Bytes(), SchemaRoot: []byte(evt.SchemaRoot), Creator: p.host.ID().String(), @@ -465,8 +465,8 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { // push to each peer (replicator) p.pushLogToReplicators(p.ctx, evt) - if err := p.server.publishLog(p.ctx, evt.DocKey, req); err != nil { - return NewErrPublishingToDockeyTopic(err, evt.Cid.String(), evt.DocKey) + if err := p.server.publishLog(p.ctx, evt.DocID, req); err != nil { + return NewErrPublishingToDocIDTopic(err, evt.Cid.String(), evt.DocID) } if err := p.server.publishLog(p.ctx, evt.SchemaRoot, req); err != nil { @@ -479,7 +479,7 @@ func (p *Peer) handleDocUpdateLog(evt events.Update) error { func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { // push to each peer (replicator) peers := make(map[string]struct{}) - for _, peer := range p.ps.ListPeers(lg.DocKey) { + for _, peer := range p.ps.ListPeers(lg.DocID) { peers[peer.String()] = struct{}{} } for _, peer := range p.ps.ListPeers(lg.SchemaRoot) { @@ -503,7 +503,7 @@ func (p *Peer) pushLogToReplicators(ctx context.Context, lg events.Update) { p.ctx, "Failed pushing log", err, - logging.NewKV("DocKey", lg.DocKey), + logging.NewKV("DocID", lg.DocID), logging.NewKV("CID", lg.Cid), logging.NewKV("PeerID", peerID)) } diff --git a/net/peer_collection.go b/net/peer_collection.go index 58f83f7aa8..02bbb6e9a6 100644 --- a/net/peer_collection.go +++ b/net/peer_collection.go @@ -65,16 +65,16 @@ func (p *Peer) AddP2PCollections(ctx context.Context, collectionIDs []string) er // from the pubsub topics to avoid receiving duplicate events. removedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocKeys(p.ctx) + keyChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return err } for key := range keyChan { - err := p.server.removePubSubTopic(key.Key.String()) + err := p.server.removePubSubTopic(key.ID.String()) if err != nil { return p.rollbackRemovePubSubTopics(removedTopics, err) } - removedTopics = append(removedTopics, key.Key.String()) + removedTopics = append(removedTopics, key.ID.String()) } } @@ -130,16 +130,16 @@ func (p *Peer) RemoveP2PCollections(ctx context.Context, collectionIDs []string) // to the pubsub topics. 
addedTopics := []string{} for _, col := range storeCollections { - keyChan, err := col.GetAllDocKeys(p.ctx) + keyChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return err } for key := range keyChan { - err := p.server.addPubSubTopic(key.Key.String(), true) + err := p.server.addPubSubTopic(key.ID.String(), true) if err != nil { return p.rollbackAddPubSubTopics(addedTopics, err) } - addedTopics = append(addedTopics, key.Key.String()) + addedTopics = append(addedTopics, key.ID.String()) } } diff --git a/net/peer_replicator.go b/net/peer_replicator.go index c444dee58f..0506e018c4 100644 --- a/net/peer_replicator.go +++ b/net/peer_replicator.go @@ -92,9 +92,9 @@ func (p *Peer) SetReplicator(ctx context.Context, rep client.Replicator) error { // push all collection documents to the replicator peer for _, col := range added { - keysCh, err := col.WithTxn(txn).GetAllDocKeys(ctx) + keysCh, err := col.WithTxn(txn).GetAllDocIDs(ctx) if err != nil { - return NewErrReplicatorDocKey(err, col.Name(), rep.Info.ID) + return NewErrReplicatorDocID(err, col.Name(), rep.Info.ID) } p.pushToReplicator(ctx, txn, col, keysCh, rep.Info.ID) } diff --git a/net/peer_test.go b/net/peer_test.go index cdbc4581dc..780ae74e35 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -187,7 +187,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { ) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, ps, h.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, ps, h.ID(), doc.ID().String(), true) require.NoError(t, err) _, err = NewPeer(ctx, db, h, nil, ps, nil, nil) @@ -341,7 +341,7 @@ func TestRegisterNewDocument_NoError(t *testing.T) { cid, err := createCID(doc) require.NoError(t, err) - err = n.RegisterNewDocument(ctx, doc.Key(), cid, &EmptyNode{}, col.SchemaRoot()) + err = n.RegisterNewDocument(ctx, doc.ID(), cid, &EmptyNode{}, col.SchemaRoot()) require.NoError(t, err) } @@ -362,13 +362,13 @@ func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) require.NoError(t, err) cid, err := createCID(doc) require.NoError(t, err) - err = n.RegisterNewDocument(ctx, doc.Key(), cid, &EmptyNode{}, col.SchemaRoot()) + err = n.RegisterNewDocument(ctx, doc.ID(), cid, &EmptyNode{}, col.SchemaRoot()) require.Equal(t, err.Error(), "creating topic: joining topic: topic already exists") } @@ -482,7 +482,7 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test err = col.Create(ctx, doc) require.NoError(t, err) - keysCh, err := col.GetAllDocKeys(ctx) + keysCh, err := col.GetAllDocIDs(ctx) require.NoError(t, err) txn, err := db.NewTxn(ctx, true) @@ -805,14 +805,14 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) require.NoError(t, err) err = n.handleDocCreateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, @@ -821,15 +821,15 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { require.NoError(t, err) } -func TestHandleDocCreateLog_WithInvalidDockey_NoError(t *testing.T) { +func 
TestHandleDocCreateLog_WithInvalidDocID_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) defer n.Close() err := n.handleDocCreateLog(events.Update{ - DocKey: "some-invalid-key", + DocID: "some-invalid-key", }) - require.ErrorContains(t, err, "failed to get DocKey from broadcast message: selected encoding not supported") + require.ErrorContains(t, err, "failed to get DocID from broadcast message: selected encoding not supported") } func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { @@ -852,11 +852,11 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { err = col.Create(ctx, doc) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.ID().String(), true) require.NoError(t, err) err = n.handleDocCreateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), SchemaRoot: col.SchemaRoot(), }) require.ErrorContains(t, err, "topic already exists") @@ -888,14 +888,14 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) require.NoError(t, err) err = n.handleDocUpdateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, @@ -904,18 +904,18 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { require.NoError(t, err) } -func TestHandleDoUpdateLog_WithInvalidDockey_NoError(t *testing.T) { +func TestHandleDoUpdateLog_WithInvalidDocID_NoError(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) defer n.Close() err := n.handleDocUpdateLog(events.Update{ - DocKey: "some-invalid-key", + DocID: "some-invalid-key", }) - require.ErrorContains(t, err, "failed to get DocKey from broadcast message: selected encoding not supported") + require.ErrorContains(t, err, "failed to get DocID from broadcast message: selected encoding not supported") } -func TestHandleDocUpdateLog_WithExistingDockeyTopic_TopicExistsError(t *testing.T) { +func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) defer n.Close() @@ -941,17 +941,17 @@ func TestHandleDocUpdateLog_WithExistingDockeyTopic_TopicExistsError(t *testing. delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.ps, n.host.ID(), doc.ID().String(), true) require.NoError(t, err) err = n.handleDocUpdateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, @@ -985,7 +985,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. delta := &crdt.CompositeDAGDelta{ SchemaVersionID: col.Schema().VersionID, Priority: 1, - DocKey: doc.Key().Bytes(), + DocID: doc.ID().Bytes(), } node, err := makeNode(delta, []cid.Cid{docCid}) @@ -995,7 +995,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. 
require.NoError(t, err) err = n.handleDocUpdateLog(events.Update{ - DocKey: doc.Key().String(), + DocID: doc.ID().String(), Cid: docCid, SchemaRoot: col.SchemaRoot(), Block: node, diff --git a/net/process.go b/net/process.go index 3d776cc1c1..38a5a077fb 100644 --- a/net/process.go +++ b/net/process.go @@ -69,7 +69,7 @@ func (bp *blockProcessor) mergeBlocks(ctx context.Context) { ctx, "Failed to process block", err, - logging.NewKV("DocKey", bp.dsKey.DocKey), + logging.NewKV("DocID", bp.dsKey.DocID), logging.NewKV("CID", nd.Cid()), ) } @@ -111,7 +111,7 @@ func (bp *blockProcessor) processBlock(ctx context.Context, nd ipld.Node, field ctx, "Failed to process block", err, - logging.NewKV("DocKey", bp.dsKey.DocKey), + logging.NewKV("DocID", bp.dsKey.DocID), logging.NewKV("CID", nd.Cid()), ) } @@ -132,7 +132,7 @@ func initCRDTForType( description := col.Description() if field == "" { // empty field name implies composite type ctype = client.COMPOSITE - key = base.MakeCollectionKey( + key = base.MakeDataStoreKeyWithCollectionDescription( description, ).WithInstanceInfo( dsKey, @@ -155,7 +155,7 @@ func initCRDTForType( } ctype = fd.Typ fieldID := fd.ID.String() - key = base.MakeCollectionKey(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) + key = base.MakeDataStoreKeyWithCollectionDescription(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) return merklecrdt.NewMerkleLWWRegister( diff --git a/net/server.go b/net/server.go index b770e3cf2b..e93000d1b9 100644 --- a/net/server.go +++ b/net/server.go @@ -94,8 +94,8 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) return nil, err } - // Get all DocKeys across all collections in the DB - log.Debug(p.ctx, "Getting all existing DocKey...") + // Get all DocIDs across all collections in the DB + log.Debug(p.ctx, "Getting all existing DocIDs...") cols, err := s.db.GetAllCollections(s.peer.ctx) if err != nil { return nil, err @@ -103,28 +103,28 @@ func newServer(p *Peer, db client.DB, opts ...grpc.DialOption) (*server, error) i := 0 for _, col := range cols { - // If we subscribed to the collection, we skip subscribing to the collection's dockeys. + // If we subscribed to the collection, we skip subscribing to the collection's docIDs. if _, ok := colMap[col.SchemaRoot()]; ok { continue } - keyChan, err := col.GetAllDocKeys(p.ctx) + docIDChan, err := col.GetAllDocIDs(p.ctx) if err != nil { return nil, err } - for key := range keyChan { + for docID := range docIDChan { log.Debug( p.ctx, - "Registering existing DocKey pubsub topic", - logging.NewKV("DocKey", key.Key.String()), + "Registering existing DocID pubsub topic", + logging.NewKV("DocID", docID.ID.String()), ) - if err := s.addPubSubTopic(key.Key.String(), true); err != nil { + if err := s.addPubSubTopic(docID.ID.String(), true); err != nil { return nil, err } i++ } } - log.Debug(p.ctx, "Finished registering all DocKey pubsub topics", logging.NewKV("Count", i)) + log.Debug(p.ctx, "Finished registering all DocID pubsub topics", logging.NewKV("Count", i)) } var err error @@ -166,29 +166,29 @@ type docQueue struct { mu sync.Mutex } -// add adds a docKey to the queue. If the docKey is already in the queue, it will -// wait for the docKey to be removed from the queue. For every add call, done must -// be called to remove the docKey from the queue. Otherwise, subsequent add calls will +// add adds a docID to the queue. 
If the docID is already in the queue, it will +// wait for the docID to be removed from the queue. For every add call, done must +// be called to remove the docID from the queue. Otherwise, subsequent add calls will // block forever. -func (dq *docQueue) add(docKey string) { +func (dq *docQueue) add(docID string) { dq.mu.Lock() - done, ok := dq.docs[docKey] + done, ok := dq.docs[docID] if !ok { - dq.docs[docKey] = make(chan struct{}) + dq.docs[docID] = make(chan struct{}) } dq.mu.Unlock() if ok { <-done - dq.add(docKey) + dq.add(docID) } } -func (dq *docQueue) done(docKey string) { +func (dq *docQueue) done(docID string) { dq.mu.Lock() defer dq.mu.Unlock() - done, ok := dq.docs[docKey] + done, ok := dq.docs[docID] if ok { - delete(dq.docs, docKey) + delete(dq.docs, docID) close(done) } } @@ -205,14 +205,14 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL if err != nil { return nil, err } - dockey, err := client.NewDocKeyFromString(string(req.Body.DocKey)) + docID, err := client.NewDocIDFromString(string(req.Body.DocID)) if err != nil { return nil, err } - s.docQueue.add(dockey.String()) + s.docQueue.add(docID.String()) defer func() { - s.docQueue.done(dockey.String()) + s.docQueue.done(docID.String()) if s.pushLogEmitter != nil { byPeer, err := libpeer.Decode(req.Body.Creator) if err != nil { @@ -247,7 +247,7 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL } schemaRoot := string(req.Body.SchemaRoot) - dsKey := core.DataStoreKeyFromDocKey(dockey) + dsKey := core.DataStoreKeyFromDocID(docID) var txnErr error for retry := 0; retry < s.peer.db.MaxTxnRetries(); retry++ { @@ -292,17 +292,17 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL ctx, "Failed to process remote block", err, - logging.NewKV("DocKey", dsKey.DocKey), + logging.NewKV("DocID", dsKey.DocID), logging.NewKV("CID", cid), ) } session.Wait() bp.mergeBlocks(ctx) - // dagWorkers specific to the dockey will have been spawned within handleChildBlocks. + // dagWorkers specific to the DocID will have been spawned within handleChildBlocks. // Once we are done with the dag syncing process, we can get rid of those workers. if s.peer.closeJob != nil { - s.peer.closeJob <- dsKey.DocKey + s.peer.closeJob <- dsKey.DocID } if txnErr = txn.Commit(ctx); txnErr != nil { @@ -312,10 +312,10 @@ func (s *server) PushLog(ctx context.Context, req *pb.PushLogRequest) (*pb.PushL return &pb.PushLogReply{}, txnErr } - // Once processed, subscribe to the dockey topic on the pubsub network unless we already + // Once processed, subscribe to the DocID topic on the pubsub network unless we already // suscribe to the collection. if !s.hasPubSubTopic(col.SchemaRoot()) { - err = s.addPubSubTopic(dsKey.DocKey, true) + err = s.addPubSubTopic(dsKey.DocID, true) if err != nil { return nil, err } @@ -441,7 +441,7 @@ func (s *server) publishLog(ctx context.Context, topic string, req *pb.PushLogRe ctx, "Published log", logging.NewKV("CID", cid), - logging.NewKV("DocKey", topic), + logging.NewKV("DocID", topic), ) return nil } @@ -470,7 +470,7 @@ func (s *server) pubSubMessageHandler(from libpeer.ID, topic string, msg []byte) return nil, nil } -// pubSubEventHandler logs events from the subscribed dockey topics. +// pubSubEventHandler logs events from the subscribed DocID topics. 
func (s *server) pubSubEventHandler(from libpeer.ID, topic string, msg []byte) { log.Info( s.peer.ctx, diff --git a/net/server_test.go b/net/server_test.go index 6b5c3a3e20..521a3b7634 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -81,11 +81,11 @@ func TestNewServerWithCollectionSubscribed(t *testing.T) { require.NoError(t, err) } -type mockDBDockeysError struct { +type mockDBDocIDsError struct { client.DB } -func (mDB *mockDBDockeysError) GetAllCollections(context.Context) ([]client.Collection, error) { +func (mDB *mockDBDocIDsError) GetAllCollections(context.Context) ([]client.Collection, error) { return []client.Collection{ &mockCollection{}, }, nil @@ -98,11 +98,11 @@ type mockCollection struct { func (mCol *mockCollection) SchemaRoot() string { return "mockColID" } -func (mCol *mockCollection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { +func (mCol *mockCollection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { return nil, mockError } -func TestNewServerWithGetAllDockeysError(t *testing.T) { +func TestNewServerWithGetAllDocIDsError(t *testing.T) { ctx := context.Background() db, n := newTestNode(ctx, t) @@ -112,7 +112,7 @@ func TestNewServerWithGetAllDockeysError(t *testing.T) { }`) require.NoError(t, err) - mDB := mockDBDockeysError{db} + mDB := mockDBDocIDsError{db} _, err = newServer(n.Peer, &mDB) require.ErrorIs(t, err, mockError) @@ -137,7 +137,7 @@ func TestNewServerWithAddTopicError(t *testing.T) { err = col.Create(ctx, doc) require.NoError(t, err) - _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.Key().String(), true) + _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) require.NoError(t, err) _, err = newServer(n.Peer, db) @@ -226,20 +226,20 @@ func TestDocQueue(t *testing.T) { docs: make(map[string]chan struct{}), } - testKey := "test" + testDocID := "test" - q.add(testKey) - go q.add(testKey) + q.add(testDocID) + go q.add(testDocID) // give time for the goroutine to block time.Sleep(10 * time.Millisecond) require.Len(t, q.docs, 1) - q.done(testKey) - // give time for the goroutine to add the key + q.done(testDocID) + // give time for the goroutine to add the docID time.Sleep(10 * time.Millisecond) q.mu.Lock() require.Len(t, q.docs, 1) q.mu.Unlock() - q.done(testKey) + q.done(testDocID) q.mu.Lock() require.Len(t, q.docs, 0) q.mu.Unlock() @@ -274,7 +274,7 @@ func TestPushLog(t *testing.T) { _, err = n.server.PushLog(ctx, &net_pb.PushLogRequest{ Body: &net_pb.PushLogRequest_Body{ - DocKey: []byte(doc.Key().String()), + DocID: []byte(doc.ID().String()), Cid: cid.Bytes(), SchemaRoot: []byte(col.SchemaRoot()), Creator: n.PeerID().String(), diff --git a/planner/commit.go b/planner/commit.go index b4fd3ed3c1..3caf6d2f4a 100644 --- a/planner/commit.go +++ b/planner/commit.go @@ -68,15 +68,15 @@ func (n *dagScanNode) Kind() string { func (n *dagScanNode) Init() error { if len(n.spans.Value) == 0 { - if n.commitSelect.DocKey.HasValue() { - key := core.DataStoreKey{}.WithDocKey(n.commitSelect.DocKey.Value()) + if n.commitSelect.DocID.HasValue() { + dsKey := core.DataStoreKey{}.WithDocID(n.commitSelect.DocID.Value()) if n.commitSelect.FieldID.HasValue() { field := n.commitSelect.FieldID.Value() - key = key.WithFieldId(field) + dsKey = dsKey.WithFieldId(field) } - n.spans = core.NewSpans(core.NewSpan(key, key.PrefixEnd())) + n.spans = core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd())) } } @@ -89,9 +89,9 @@ func (n *dagScanNode) Start() error { // Spans needs to parse the given 
span set. dagScanNode only // cares about the first value in the span set. The value is -// either a CID or a DocKey. +// either a CID or a DocID. // If its a CID, set the node CID val -// if its a DocKey, set the node Key val (headset) +// if it's a DocID, set the node Key val (headset) func (n *dagScanNode) Spans(spans core.Spans) { if len(spans.Value) == 0 { return } @@ -291,7 +291,7 @@ All the dagScanNode endpoints use similar structures func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.Link, error) { commit := n.commitSelect.DocumentMapping.NewDoc() cid := block.Cid() - n.commitSelect.DocumentMapping.SetFirstOfName(&commit, "cid", cid.String()) + n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.CidFieldName, cid.String()) // decode the delta, get the priority and payload nd, err := dag.DecodeProtobuf(block.RawData()) @@ -305,18 +305,18 @@ func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.L return core.Doc{}, nil, err } - prio, ok := delta["Priority"].(uint64) + prio, ok := delta[request.DeltaArgPriority].(uint64) if !ok { return core.Doc{}, nil, ErrDeltaMissingPriority } - schemaVersionId, ok := delta["SchemaVersionID"].(string) + schemaVersionId, ok := delta[request.DeltaArgSchemaVersionID].(string) if !ok { return core.Doc{}, nil, ErrDeltaMissingSchemaVersionID } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.SchemaVersionIDFieldName, schemaVersionId) - fieldName, ok := delta["FieldName"] + fieldName, ok := delta[request.DeltaArgFieldName] if !ok { return core.Doc{}, nil, ErrDeltaMissingFieldName } @@ -346,17 +346,17 @@ func (n *dagScanNode) dagBlockToNodeDoc(block blocks.Block) (core.Doc, []*ipld.L } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.HeightFieldName, int64(prio)) - n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.DeltaFieldName, delta["Data"]) + n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.DeltaFieldName, delta[request.DeltaArgData]) n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.FieldNameFieldName, fieldName) n.commitSelect.DocumentMapping.SetFirstOfName(&commit, request.FieldIDFieldName, fieldID) - dockey, ok := delta["DocKey"].([]byte) + docID, ok := delta[request.DeltaArgDocID].([]byte) if !ok { - return core.Doc{}, nil, ErrDeltaMissingDockey + return core.Doc{}, nil, ErrDeltaMissingDocID } n.commitSelect.DocumentMapping.SetFirstOfName(&commit, - request.DockeyFieldName, string(dockey)) + request.DocIDArgName, string(docID)) cols, err := n.planner.db.GetCollectionsByVersionID(n.planner.ctx, schemaVersionId) if err != nil { diff --git a/planner/create.go b/planner/create.go index 618591ccfe..c8c48b658d 100644 --- a/planner/create.go +++ b/planner/create.go @@ -86,7 +86,7 @@ func (n *createNode) Next() (bool, error) { currentValue := n.documentMapping.NewDoc() - currentValue.SetKey(n.doc.Key().String()) + currentValue.SetID(n.doc.ID().String()) for i, value := range n.doc.Values() { if len(n.documentMapping.IndexesByName[i.Name()]) > 0 { n.documentMapping.SetFirstOfName(&currentValue, i.Name(), value.Value()) @@ -101,8 +101,8 @@ func (n *createNode) Next() (bool, error) { n.currentValue = currentValue desc := n.collection.Description() - docKey := base.MakeDocKey(desc, currentValue.GetKey()) - n.results.Spans(core.NewSpans(core.NewSpan(docKey, docKey.PrefixEnd()))) + docID := base.MakeDataStoreKeyWithCollectionAndDocID(desc, currentValue.GetID()) + n.results.Spans(core.NewSpans(core.NewSpan(docID, docID.PrefixEnd()))) err := 
n.results.Init() if err != nil { diff --git a/planner/delete.go b/planner/delete.go index de59cf30b7..b1096ffdb6 100644 --- a/planner/delete.go +++ b/planner/delete.go @@ -27,7 +27,7 @@ type deleteNode struct { source planNode filter *mapper.Filter - ids []string + docIDs []string execInfo deleteExecInfo } @@ -49,11 +49,11 @@ func (n *deleteNode) Next() (bool, error) { } n.currentValue = n.source.Value() - key, err := client.NewDocKeyFromString(n.currentValue.GetKey()) + docID, err := client.NewDocIDFromString(n.currentValue.GetID()) if err != nil { return false, err } - _, err = n.collection.DeleteWithKey(n.p.ctx, key) + _, err = n.collection.DeleteWithDocID(n.p.ctx, docID) if err != nil { return false, err } @@ -88,7 +88,7 @@ func (n *deleteNode) simpleExplain() (map[string]any, error) { simpleExplainMap := map[string]any{} // Add the document id(s) that request wants to delete. - simpleExplainMap[idsLabel] = n.ids + simpleExplainMap[request.DocIDsArgName] = n.docIDs // Add the filter attribute if it exists, otherwise have it nil. if n.filter == nil { @@ -131,7 +131,7 @@ func (p *Planner) DeleteDocs(parsed *mapper.Mutation) (planNode, error) { return &deleteNode{ p: p, filter: parsed.Filter, - ids: parsed.DocKeys.Value(), + docIDs: parsed.DocIDs.Value(), collection: col.WithTxn(p.txn), source: slctNode, docMapper: docMapper{parsed.DocumentMapping}, diff --git a/planner/errors.go b/planner/errors.go index c4856178f3..54db7a7c79 100644 --- a/planner/errors.go +++ b/planner/errors.go @@ -20,9 +20,9 @@ const ( ) var ( - ErrDeltaMissingSchemaVersionID = errors.New("commit Delta missing schema version id") + ErrDeltaMissingSchemaVersionID = errors.New("commit Delta missing schema version ID") ErrDeltaMissingPriority = errors.New("commit Delta missing priority key") - ErrDeltaMissingDockey = errors.New("commit Delta missing dockey") + ErrDeltaMissingDocID = errors.New("commit Delta missing document ID") ErrDeltaMissingFieldName = errors.New("commit Delta missing field name") ErrFailedToFindScanNode = errors.New("failed to find original scan node in plan graph") ErrMissingQueryOrMutation = errors.New("request is missing query or mutation operation statements") @@ -34,9 +34,6 @@ var ( ErrMissingChildValue = errors.New("expected child value, however none was yielded") ErrUnknownRelationType = errors.New("failed sub selection, unknown relation type") ErrUnknownExplainRequestType = errors.New("can not explain request of unknown type") - ErrSubTypeInit = errors.New(errSubTypeInit) - ErrFailedToCollectExecExplainInfo = errors.New(errFailedToCollectExecExplainInfo) - ErrUnknownDependency = errors.New(errUnknownDependency) ) func NewErrUnknownDependency(name string) error { diff --git a/planner/explain.go b/planner/explain.go index 07f96f9b0a..76e562dc94 100644 --- a/planner/explain.go +++ b/planner/explain.go @@ -56,10 +56,8 @@ const ( dataLabel = "data" fieldNameLabel = "fieldName" filterLabel = "filter" - idsLabel = "ids" joinRootLabel = "root" joinSubTypeLabel = "subType" - keysLabel = "_keys" limitLabel = "limit" offsetLabel = "offset" sourcesLabel = "sources" @@ -170,7 +168,7 @@ func buildDebugExplainGraph(source planNode) (map[string]any, error) { // // query @explain { // user { -// _key +// _docID // age // name // } diff --git a/planner/group.go b/planner/group.go index 0890b13d84..ae2a0c8bee 100644 --- a/planner/group.go +++ b/planner/group.go @@ -230,10 +230,10 @@ func (n *groupNode) simpleExplain() (map[string]any, error) { c := child.Targetable // Get targetable attribute(s) of this child. 
- if c.DocKeys.HasValue() { - childExplainGraph["docKeys"] = c.DocKeys.Value() + if c.DocIDs.HasValue() { + childExplainGraph[request.DocIDsArgName] = c.DocIDs.Value() } else { - childExplainGraph["docKeys"] = nil + childExplainGraph[request.DocIDsArgName] = nil } if c.Filter == nil { diff --git a/planner/mapper/commitSelect.go b/planner/mapper/commitSelect.go index c71e4fdc20..969a87e938 100644 --- a/planner/mapper/commitSelect.go +++ b/planner/mapper/commitSelect.go @@ -20,7 +20,7 @@ type CommitSelect struct { Select // The key of the target document for which to get commits for. - DocKey immutable.Option[string] + DocID immutable.Option[string] // The field for which commits have been requested. FieldID immutable.Option[string] @@ -42,7 +42,7 @@ func (s *CommitSelect) CloneTo(index int) Requestable { func (s *CommitSelect) cloneTo(index int) *CommitSelect { return &CommitSelect{ Select: *s.Select.cloneTo(index), - DocKey: s.DocKey, + DocID: s.DocID, FieldID: s.FieldID, Cid: s.Cid, } diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index 3771cb5475..ff7e19ff21 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -762,7 +762,7 @@ func getTopLevelInfo( // be fine for now schema = schemas[0] } else { - mapping.Add(core.DocKeyFieldIndex, request.KeyFieldName) + mapping.Add(core.DocIDFieldIndex, request.DocIDFieldName) schema = collection.Schema() } @@ -1035,7 +1035,7 @@ func resolveSecondaryRelationIDs( if !siblingFound { objectFieldName := strings.TrimSuffix(existingField.Name, request.RelatedObjectID) - // We only require the dockey of the related object, so an empty join is all we need. + // We only require the docID of the related object, so an empty join is all we need. join, err := constructEmptyJoin( ctx, store, @@ -1069,7 +1069,7 @@ func ToCommitSelect( } return &CommitSelect{ Select: *underlyingSelect, - DocKey: selectRequest.DocKey, + DocID: selectRequest.DocID, FieldID: selectRequest.FieldID, Depth: selectRequest.Depth, Cid: selectRequest.Cid, @@ -1096,7 +1096,7 @@ func ToMutation(ctx context.Context, store client.Store, mutationRequest *reques func toTargetable(index int, selectRequest *request.Select, docMap *core.DocumentMapping) Targetable { return Targetable{ Field: toField(index, selectRequest), - DocKeys: selectRequest.DocKeys, + DocIDs: selectRequest.DocIDs, Filter: ToFilter(selectRequest.Filter.Value(), docMap), Limit: toLimit(selectRequest.Limit, selectRequest.Offset), GroupBy: toGroupBy(selectRequest.GroupBy, docMap), @@ -1141,7 +1141,7 @@ func toFilterMap( sourceClause any, mapping *core.DocumentMapping, ) (connor.FilterKey, any) { - if strings.HasPrefix(sourceKey, "_") && sourceKey != request.KeyFieldName { + if strings.HasPrefix(sourceKey, "_") && sourceKey != request.DocIDFieldName { key := &Operator{ Operation: sourceKey, } diff --git a/planner/mapper/targetable.go b/planner/mapper/targetable.go index 0b571e6830..ae9d81e29a 100644 --- a/planner/mapper/targetable.go +++ b/planner/mapper/targetable.go @@ -192,9 +192,9 @@ type Targetable struct { // The basic field information of this property. Field - // A optional collection of docKeys that can be specified to restrict results + // An optional collection of docIDs that can be specified to restrict results // to belonging to this set. - DocKeys immutable.Option[[]string] + DocIDs immutable.Option[[]string] // An optional filter, that can be specified to restrict results to documents // that satisfies all of its conditions. 
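For readers following the `immutable.Option` usage in these hunks: `DocIDs` only restricts results when explicitly set, which callers check via `HasValue`/`Value` as `groupNode.simpleExplain` does above. A minimal sketch of that pattern, with hypothetical IDs (not code from this patch):

```go
package main

import (
	"fmt"

	"github.com/sourcenetwork/immutable"
)

func main() {
	// Zero value: HasValue() is false, so no docID restriction applies.
	var docIDs immutable.Option[[]string]

	// Explicitly restrict results to a set of document IDs
	// ("bae-123" and "bae-456" are placeholders).
	docIDs = immutable.Some([]string{"bae-123", "bae-456"})

	if docIDs.HasValue() {
		for _, id := range docIDs.Value() {
			// The planner turns each ID into its own point-lookup span.
			fmt.Println("restricted to:", id)
		}
	}
}
```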
@@ -218,7 +218,7 @@ type Targetable struct { func (t *Targetable) cloneTo(index int) *Targetable { return &Targetable{ Field: *t.Field.cloneTo(index), - DocKeys: t.DocKeys, + DocIDs: t.DocIDs, Filter: t.Filter, Limit: t.Limit, GroupBy: t.GroupBy, diff --git a/planner/multi.go b/planner/multi.go index 02bd4a0fda..30bbc8338c 100644 --- a/planner/multi.go +++ b/planner/multi.go @@ -56,7 +56,7 @@ type appendNode interface { // Eg: // // user { -// _key +// _docID // name // friends { // name @@ -164,7 +164,7 @@ func (p *parallelNode) nextMerge(index int, plan mergeNode) (bool, error) { scan node ========= { - _key: bae-ALICE, + _docID: bae-ALICE, name: Alice, points: 124, verified: false @@ -175,7 +175,7 @@ typeJoin node(merge) { friends: [ { - _key: bae-BOB, + _docID: bae-BOB, name: bob, points: 99.9, verified: true, @@ -187,14 +187,14 @@ output ====== { - _key: bae-ALICE, + _docID: bae-ALICE, name: Alice, points: 124, verified: false, friends: [ { - _key: bae-BOB, + _docID: bae-BOB, name: bob, points: 99.9, verified: true, @@ -205,13 +205,13 @@ output */ func (p *parallelNode) nextAppend(index int, plan appendNode) (bool, error) { - key := p.currentValue.GetKey() + key := p.currentValue.GetID() if key == "" { return false, nil } // pass the doc key as a reference through the spans interface - spans := core.NewSpans(core.NewSpan(core.DataStoreKey{DocKey: key}, core.DataStoreKey{})) + spans := core.NewSpans(core.NewSpan(core.DataStoreKey{DocID: key}, core.DataStoreKey{})) plan.Spans(spans) err := plan.Init() if err != nil { @@ -239,7 +239,7 @@ func (p *parallelNode) nextAppend(index int, plan appendNode) (bool, error) { query { user { - _key + _docID name points verified @@ -253,7 +253,7 @@ query { scan node ========= { - _key: bae-ALICE, + _docID: bae-ALICE, name: Alice, points: 124, verified: false diff --git a/planner/scan.go b/planner/scan.go index 64a534da6d..19ae079f5f 100644 --- a/planner/scan.go +++ b/planner/scan.go @@ -170,7 +170,7 @@ func (n *scanNode) Start() error { func (n *scanNode) initScan() error { if !n.spans.HasValue { - start := base.MakeCollectionKey(n.col.Description()) + start := base.MakeDataStoreKeyWithCollectionDescription(n.col.Description()) n.spans = core.NewSpans(core.NewSpan(start, start.PrefixEnd())) } diff --git a/planner/select.go b/planner/select.go index 11b2ef510b..f1d85de9f3 100644 --- a/planner/select.go +++ b/planner/select.go @@ -114,7 +114,7 @@ type selectNode struct { // are defined in the subtype scan node. filter *mapper.Filter - keys immutable.Option[[]string] + docIDs immutable.Option[[]string] selectReq *mapper.Select groupSelects []*mapper.Select @@ -166,10 +166,10 @@ func (n *selectNode) Next() (bool, error) { n.execInfo.filterMatches++ - if n.keys.HasValue() { - docKey := n.currentValue.GetKey() - for _, key := range n.keys.Value() { - if docKey == key { + if n.docIDs.HasValue() { + docID := n.currentValue.GetID() + for _, docIDValue := range n.docIDs.Value() { + if docID == docIDValue { return true, nil } } @@ -199,11 +199,11 @@ func (n *selectNode) simpleExplain() (map[string]any, error) { simpleExplainMap[filterLabel] = n.filter.ToMap(n.documentMapping) } - // Add the keys attribute if it exists. - if !n.keys.HasValue() { - simpleExplainMap[keysLabel] = nil + // Add the docIDs attribute if it exists. 
+ if !n.docIDs.HasValue() { + simpleExplainMap[request.DocIDsArgName] = nil } else { - simpleExplainMap[keysLabel] = n.keys.Value() + simpleExplainMap[request.DocIDsArgName] = n.docIDs.Value() } return simpleExplainMap, nil @@ -255,7 +255,7 @@ func (n *selectNode) initSource() ([]aggregateNode, error) { origScan.filter = n.filter n.filter = nil - // If we have both a DocKey and a CID, then we need to run + // If we have both a DocID and a CID, then we need to run // a TimeTravel (History-Traversing Versioned) query, which means // we need to propagate the values to the underlying VersionedFetcher if n.selectReq.Cid.HasValue() { @@ -264,21 +264,21 @@ func (n *selectNode) initSource() ([]aggregateNode, error) { return nil, err } spans := fetcher.NewVersionedSpan( - core.DataStoreKey{DocKey: n.selectReq.DocKeys.Value()[0]}, + core.DataStoreKey{DocID: n.selectReq.DocIDs.Value()[0]}, c, ) // @todo check len origScan.Spans(spans) - } else if n.selectReq.DocKeys.HasValue() { - // If we *just* have a DocKey(s), run a FindByDocKey(s) optimization - // if we have a FindByDockey filter, create a span for it + } else if n.selectReq.DocIDs.HasValue() { + // If we *just* have a DocID(s), run a FindByDocID(s) optimization + // if we have a FindByDocID filter, create a span for it // and propagate it to the scanNode // @todo: When running the optimizer, check if the filter object - // contains a _key equality condition, and upgrade it to a point lookup + // contains a _docID equality condition, and upgrade it to a point lookup // instead of a prefix scan + filter via the Primary Index (0), like here: - spans := make([]core.Span, len(n.selectReq.DocKeys.Value())) - for i, docKey := range n.selectReq.DocKeys.Value() { - dockeyIndexKey := base.MakeDocKey(sourcePlan.collection.Description(), docKey) - spans[i] = core.NewSpan(dockeyIndexKey, dockeyIndexKey.PrefixEnd()) + spans := make([]core.Span, len(n.selectReq.DocIDs.Value())) + for i, docID := range n.selectReq.DocIDs.Value() { + docIDIndexKey := base.MakeDataStoreKeyWithCollectionAndDocID(sourcePlan.collection.Description(), docID) + spans[i] = core.NewSpan(docIDIndexKey, docIDIndexKey.PrefixEnd()) } origScan.Spans(core.NewSpans(spans...)) } @@ -352,7 +352,7 @@ func (n *selectNode) initFields(selectReq *mapper.Select) ([]aggregateNode, erro // of that Target version we are querying. // So instead of a LatestCommit subquery, we need // a OneCommit subquery, with the supplied parameters. 
- commitSlct.DocKey = immutable.Some(selectReq.DocKeys.Value()[0]) // @todo check length + commitSlct.DocID = immutable.Some(selectReq.DocIDs.Value()[0]) // @todo check length commitSlct.Cid = selectReq.Cid } @@ -413,7 +413,7 @@ func (p *Planner) SelectFromSource( selectReq: selectReq, docMapper: docMapper{selectReq.DocumentMapping}, filter: selectReq.Filter, - keys: selectReq.DocKeys, + docIDs: selectReq.DocIDs, } limit := selectReq.Limit orderBy := selectReq.OrderBy @@ -468,7 +468,7 @@ func (p *Planner) Select(selectReq *mapper.Select) (planNode, error) { s := &selectNode{ planner: p, filter: selectReq.Filter, - keys: selectReq.DocKeys, + docIDs: selectReq.DocIDs, selectReq: selectReq, docMapper: docMapper{selectReq.DocumentMapping}, } diff --git a/planner/type_join.go b/planner/type_join.go index 47ba07e96b..fc4e6009cf 100644 --- a/planner/type_join.go +++ b/planner/type_join.go @@ -435,7 +435,7 @@ func fetchPrimaryDoc(node, subNode planNode, parentProp string) (bool, error) { subDoc := subNode.Value() ind := subNode.DocumentMap().FirstIndexOfName(parentProp) - docKeyStr, isStr := subDoc.Fields[ind].(string) + docIDStr, isStr := subDoc.Fields[ind].(string) if !isStr { return false, nil } @@ -444,9 +444,9 @@ func fetchPrimaryDoc(node, subNode planNode, parentProp string) (bool, error) { if scan == nil { return false, nil } - rootDocKey := base.MakeDocKey(scan.col.Description(), docKeyStr) + dsKey := base.MakeDataStoreKeyWithCollectionAndDocID(scan.col.Description(), docIDStr) - spans := core.NewSpans(core.NewSpan(rootDocKey, rootDocKey.PrefixEnd())) + spans := core.NewSpans(core.NewSpan(dsKey, dsKey.PrefixEnd())) node.Spans(spans) @@ -543,15 +543,15 @@ func (join *invertibleTypeJoin) processSecondResult(secondDocs []core.Doc) (any, if join.secondaryFetchLimit == 1 { if len(secondDocs) != 0 { secondResult = secondDocs[0] - secondIDResult = secondDocs[0].GetKey() + secondIDResult = secondDocs[0].GetID() } } else { secondResult = secondDocs - secondDocKeys := make([]string, len(secondDocs)) + secondDocIDs := make([]string, len(secondDocs)) for i, doc := range secondDocs { - secondDocKeys[i] = doc.GetKey() + secondDocIDs[i] = doc.GetID() } - secondIDResult = secondDocKeys + secondIDResult = secondDocIDs } join.root.Value().Fields[join.subSelect.Index] = secondResult if join.secondaryFieldIndex.HasValue() { @@ -573,7 +573,7 @@ func (join *invertibleTypeJoin) Next() (bool, error) { secondDocs, err := fetchDocsWithFieldValue( join.dir.secondNode, join.dir.secondaryField, - firstDoc.GetKey(), + firstDoc.GetID(), join.secondaryFetchLimit, ) if err != nil { diff --git a/planner/type_join.md b/planner/type_join.md index cf4573431f..e566168881 100644 --- a/planner/type_join.md +++ b/planner/type_join.md @@ -11,7 +11,7 @@ type User { type Friend { name: String friendsDate: DateTime - user_id: DocKey + user_id: DocID } - > @@ -23,7 +23,7 @@ type Friend { { query { user { selectTopNode -> (source) selectNode -> (source) scanNode(user) -> filter: NIL - [_key] + [_docID] name // key = bae-KHDFLGHJFLDG @@ -39,13 +39,13 @@ selectTopNode - > selectNode -> MultiNode.children: []planNode -> multiScanNode -> TypeJoinNode(merge**) -> TypeJoinOneMany -> (one) multiScanNode(scanNode(user)**) -> } -> scanNode(user).Value() -> doc -> (many) selectNode - > scanNode(friend) -1. NEXT/VALUES MultiNode.doc = {_key: bae-KHDFLGHJFLDG, name: "BOB"} -2. NEXT/VALUES TypeJoinOneMany.one {_key: bae-KHDFLGHJFLDG, name: "BOB"} +1. NEXT/VALUES MultiNode.doc = {_docID: bae-KHDFLGHJFLDG, name: "BOB"} +2. 
NEXT/VALUES TypeJoinOneMany.one {_docID: bae-KHDFLGHJFLDG, name: "BOB"} 3. NEXT/VALUES (many).selectNode.doc = {name: "Eric", date: Oct29} LOOP -4. NEXT/VALUES TypeJoinNode {_key: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{{name: "Eric", date: Oct29}}]} +4. NEXT/VALUES TypeJoinNode {_docID: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{{name: "Eric", date: Oct29}}]} 5. NEXT/VALUES (many).selectNode.doc = {name: "Jimmy", date: Oct21} -6. NEXT/VALUES TypeJoinNode {_key: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{name: "Eric", date: Oct29}, {name: "Jimmy", date: Oct21}]} +6. NEXT/VALUES TypeJoinNode {_docID: bae-KHDFLGHJFLDG, name: "BOB"} + {friends: [{name: "Eric", date: Oct29}, {name: "Jimmy", date: Oct21}]} GOTO LOOP // SPLIT FILTER @@ -65,7 +65,7 @@ query { { data: [ { - _key: bae-ALICE + _docID: bae-ALICE age: 22, name: "Alice", points: 45, @@ -80,7 +80,7 @@ query { }, { - _key: bae-CHARLIE + _docID: bae-CHARLIE age: 22, name: "Charlie", points: 45, @@ -142,7 +142,7 @@ type Address: { ... user: user - # user_id: DocKey + # user_id: DocID } query { diff --git a/planner/update.go b/planner/update.go index 36b5487c5e..78619bd55f 100644 --- a/planner/update.go +++ b/planner/update.go @@ -28,7 +28,8 @@ type updateNode struct { collection client.Collection filter *mapper.Filter - ids []string + + docIDs []string patch string @@ -62,11 +63,11 @@ func (n *updateNode) Next() (bool, error) { } n.currentValue = n.results.Value() - key, err := client.NewDocKeyFromString(n.currentValue.GetKey()) + docID, err := client.NewDocIDFromString(n.currentValue.GetID()) if err != nil { return false, err } - _, err = n.collection.UpdateWithKey(n.p.ctx, key, n.patch) + _, err = n.collection.UpdateWithDocID(n.p.ctx, docID, n.patch) if err != nil { return false, err } @@ -115,7 +116,7 @@ func (n *updateNode) simpleExplain() (map[string]any, error) { simpleExplainMap := map[string]any{} // Add the document id(s) that request wants to update. - simpleExplainMap[idsLabel] = n.ids + simpleExplainMap[request.DocIDsArgName] = n.docIDs // Add the filter attribute if it exists, otherwise have it nil. 
if n.filter == nil { @@ -157,7 +158,7 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { update := &updateNode{ p: p, filter: parsed.Filter, - ids: parsed.DocKeys.Value(), + docIDs: parsed.DocIDs.Value(), isUpdating: true, patch: parsed.Data, docMapper: docMapper{parsed.DocumentMapping}, diff --git a/request/graphql/parser/commit.go b/request/graphql/parser/commit.go index 8c9d3e47b5..e4d4c01903 100644 --- a/request/graphql/parser/commit.go +++ b/request/graphql/parser/commit.go @@ -31,9 +31,9 @@ func parseCommitSelect(schema gql.Schema, parent *gql.Object, field *ast.Field) for _, argument := range field.Arguments { prop := argument.Name.Value - if prop == request.DocKey { + if prop == request.DocIDArgName { raw := argument.Value.(*ast.StringValue) - commit.DocKey = immutable.Some(raw.Value) + commit.DocID = immutable.Some(raw.Value) } else if prop == request.Cid { raw := argument.Value.(*ast.StringValue) commit.Cid = immutable.Some(raw.Value) diff --git a/request/graphql/parser/mutation.go b/request/graphql/parser/mutation.go index 37dea7290b..0802c745d6 100644 --- a/request/graphql/parser/mutation.go +++ b/request/graphql/parser/mutation.go @@ -117,10 +117,10 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re } mut.Filter = filter - } else if prop == request.Id { + } else if prop == request.DocIDArgName { raw := argument.Value.(*ast.StringValue) mut.IDs = immutable.Some([]string{raw.Value}) - } else if prop == request.Ids { + } else if prop == request.DocIDsArgName { raw := argument.Value.(*ast.ListValue) ids := make([]string, len(raw.Values)) for i, val := range raw.Values { diff --git a/request/graphql/parser/query.go b/request/graphql/parser/query.go index c76bde7b32..3213c7489a 100644 --- a/request/graphql/parser/query.go +++ b/request/graphql/parser/query.go @@ -124,16 +124,16 @@ func parseSelect( } slct.Filter = filter - case request.DocKey: // parse single dockey query field - val := astValue.(*ast.StringValue) - slct.DocKeys = immutable.Some([]string{val.Value}) - case request.DocKeys: - docKeyValues := astValue.(*ast.ListValue).Values - docKeys := make([]string, len(docKeyValues)) - for i, value := range docKeyValues { - docKeys[i] = value.(*ast.StringValue).Value + case request.DocIDArgName: // parse single DocID field + docIDValue := astValue.(*ast.StringValue) + slct.DocIDs = immutable.Some([]string{docIDValue.Value}) + case request.DocIDsArgName: + docIDValues := astValue.(*ast.ListValue).Values + docIDs := make([]string, len(docIDValues)) + for i, value := range docIDValues { + docIDs[i] = value.(*ast.StringValue).Value } - slct.DocKeys = immutable.Some(docKeys) + slct.DocIDs = immutable.Some(docIDs) case request.Cid: // parse single CID query field val := astValue.(*ast.StringValue) slct.CID = immutable.Some(val.Value) diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index 85f401fd35..bd0934d437 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -102,8 +102,8 @@ func collectionFromAstDefinition( ) (client.CollectionDefinition, error) { fieldDescriptions := []client.FieldDescription{ { - Name: request.KeyFieldName, - Kind: client.FieldKind_DocKey, + Name: request.DocIDFieldName, + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, } @@ -130,10 +130,10 @@ func collectionFromAstDefinition( // sort the fields lexicographically sort.Slice(fieldDescriptions, func(i, j int) bool { - // make sure that the _key (KeyFieldName) is 
always at the beginning - if fieldDescriptions[i].Name == request.KeyFieldName { + // make sure that the _docID is always at the beginning + if fieldDescriptions[i].Name == request.DocIDFieldName { return true - } else if fieldDescriptions[j].Name == request.KeyFieldName { + } else if fieldDescriptions[j].Name == request.DocIDFieldName { return false } return fieldDescriptions[i].Name < fieldDescriptions[j].Name @@ -336,8 +336,8 @@ func fieldsFromAST(field *ast.FieldDefinition, // An _id field is added for every 1-N relationship from this object. fieldDescriptions = append(fieldDescriptions, client.FieldDescription{ Name: fmt.Sprintf("%s_id", field.Name.Value), - Kind: client.FieldKind_DocKey, - Typ: defaultCRDTForFieldKind[client.FieldKind_DocKey], + Kind: client.FieldKind_DocID, + Typ: defaultCRDTForFieldKind[client.FieldKind_DocID], RelationType: client.Relation_Type_INTERNAL_ID, }) } else if kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { @@ -422,7 +422,7 @@ func astTypeToKind(t ast.Type) (client.FieldKind, error) { case *ast.Named: switch astTypeVal.Name.Value { case typeID: - return client.FieldKind_DocKey, nil + return client.FieldKind_DocID, nil case typeBoolean: return client.FieldKind_BOOL, nil case typeInt: diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index f267ae8ed0..7829d5e450 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -24,7 +24,7 @@ var ( //nolint:unused gqlTypeToFieldKindReference = map[gql.Type]client.FieldKind{ - gql.ID: client.FieldKind_DocKey, + gql.ID: client.FieldKind_DocID, gql.Boolean: client.FieldKind_BOOL, gql.Int: client.FieldKind_INT, gql.Float: client.FieldKind_FLOAT, @@ -40,7 +40,7 @@ var ( } fieldKindToGQLType = map[client.FieldKind]gql.Type{ - client.FieldKind_DocKey: gql.ID, + client.FieldKind_DocID: gql.ID, client.FieldKind_BOOL: gql.Boolean, client.FieldKind_BOOL_ARRAY: gql.NewList(gql.NewNonNull(gql.Boolean)), client.FieldKind_NILLABLE_BOOL_ARRAY: gql.NewList(gql.Boolean), @@ -59,7 +59,7 @@ var ( // This map is fine to use defaultCRDTForFieldKind = map[client.FieldKind]client.CType{ - client.FieldKind_DocKey: client.LWW_REGISTER, + client.FieldKind_DocID: client.LWW_REGISTER, client.FieldKind_BOOL: client.LWW_REGISTER, client.FieldKind_BOOL_ARRAY: client.LWW_REGISTER, client.FieldKind_NILLABLE_BOOL_ARRAY: client.LWW_REGISTER, @@ -80,14 +80,17 @@ var ( ) const ( - dockeyArgDescription string = ` -An optional dockey parameter for this field. Only documents with - the given dockey will be returned. If no documents match, the result + docIDFieldDescription string = ` +The immutable identifier/docID (primary key) value for this document. +` + docIDArgDescription string = ` +An optional docID parameter for this field. Only documents with + the given docID will be returned. If no documents match, the result will be null/empty. ` - dockeysArgDescription string = ` -An optional set of dockeys for this field. Only documents with a dockey - matching a dockey in the given set will be returned. If no documents match, + docIDsArgDescription string = ` +An optional set of docIDs for this field. Only documents with a docID + matching a docID in the given set will be returned. If no documents match, the result will be null/empty. If an empty set is provided, this argument will be ignored. ` @@ -132,13 +135,13 @@ Updates documents in this collection using the data provided. Only documents the update will be applied to all documents in the collection. 
` updateIDArgDescription string = ` -An optional dockey value that will limit the update to the document with - a matching dockey. If no matching document is found, the operation will +An optional docID value that will limit the update to the document with + a matching docID. If no matching document is found, the operation will succeed, but no documents will be updated. ` updateIDsArgDescription string = ` -An optional set of dockey values that will limit the update to documents - with a matching dockey. If no matching documents are found, the operation will +An optional set of docID values that will limit the update to documents + with a matching docID. If no matching documents are found, the operation will succeed, but no documents will be updated. ` updateFilterArgDescription string = ` @@ -155,13 +158,13 @@ Deletes documents in this collection matching any provided criteria. If no criteria are provided all documents in the collection will be deleted. ` deleteIDArgDescription string = ` -An optional dockey value that will limit the delete to the document with - a matching dockey. If no matching document is found, the operation will +An optional docID value that will limit the delete to the document with + a matching docID. If no matching document is found, the operation will succeed, but no documents will be deleted. ` deleteIDsArgDescription string = ` -An optional set of dockey values that will limit the delete to documents with - a matching dockey. If no matching documents are found, the operation will +An optional set of docID values that will limit the delete to documents with + a matching docID. If no matching documents are found, the operation will succeed, but no documents will be deleted. If an empty set is provided, no documents will be deleted. ` @@ -169,9 +172,6 @@ An optional set of dockey values that will limit the delete to documents with An optional filter for this delete that will limit the delete to documents matching the given criteria. If no matching documents are found, the operation will succeed, but no documents will be deleted. -` - keyFieldDescription string = ` -The immutable primary key (dockey) value for this document. ` groupFieldDescription string = ` The group field may be used to return a set of records belonging to the group. 
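For orientation, the renamed arguments described above surface in requests roughly as follows. This is a minimal sketch against a hypothetical User collection (the collection, its fields, and the bae-... values are placeholder assumptions, not part of this change):

query {
	User(docID: "bae-...") {
		_docID
		name
	}
}

mutation {
	update_User(docIDs: ["bae-...", "bae-..."], data: "{\"points\": 45}") {
		_docID
	}
}

The old dockey/dockeys query arguments and the id/ids mutation arguments map one-to-one onto docID/docIDs here.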
diff --git a/request/graphql/schema/descriptions_test.go b/request/graphql/schema/descriptions_test.go index 2368b58c27..397436bca2 100644 --- a/request/graphql/schema/descriptions_test.go +++ b/request/graphql/schema/descriptions_test.go @@ -40,8 +40,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "User", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -89,8 +89,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "User", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -120,8 +120,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -169,8 +169,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -183,7 +183,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -209,8 +209,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -233,7 +233,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "published_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -267,8 +267,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "User", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -298,8 +298,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -347,8 +347,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -361,7 +361,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -387,8 +387,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -411,7 +411,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "published_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -445,8 +445,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: 
client.NONE_CRDT, }, { @@ -459,7 +459,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -485,8 +485,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -509,7 +509,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "published_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -543,8 +543,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Book", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { @@ -557,7 +557,7 @@ func TestSingleSimpleType(t *testing.T) { }, { Name: "author_id", - Kind: client.FieldKind_DocKey, + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, RelationType: client.Relation_Type_INTERNAL_ID, }, @@ -583,8 +583,8 @@ func TestSingleSimpleType(t *testing.T) { Name: "Author", Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.NONE_CRDT, }, { diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index f76c5623c6..556700cd7f 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -366,8 +366,8 @@ func (g *Generator) createExpandedFieldList( Description: f.Description, Type: gql.NewList(t), Args: gql.FieldConfigArgument{ - "dockey": schemaTypes.NewArgConfig(gql.String, dockeyArgDescription), - "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), dockeysArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.String, docIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), "filter": schemaTypes.NewArgConfig( g.manager.schema.TypeMap()[typeName+"FilterArg"], listFieldFilterArgDescription, @@ -435,16 +435,16 @@ func (g *Generator) buildTypes( fields := gql.Fields{} if !isEmbeddedObject { - // automatically add the _key: ID field to the type - fields[request.KeyFieldName] = &gql.Field{ - Description: keyFieldDescription, + // automatically add the _docID: ID field to the type + fields[request.DocIDFieldName] = &gql.Field{ + Description: docIDFieldDescription, Type: gql.ID, } } for _, field := range fieldDescriptions { - if field.Name == request.KeyFieldName { - // The `_key` field is included in the fieldDescriptions, + if field.Name == request.DocIDFieldName { + // The `_docID` field is included in the fieldDescriptions, // but we do not wish to override the standard definition // with the collection held definition (particularly the // description) @@ -999,10 +999,10 @@ func (g *Generator) genTypeMutationUpdateField( Description: updateDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "id": schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), - "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filter, updateFilterArgDescription), - "data": schemaTypes.NewArgConfig(gql.String, updateDataArgDescription), + request.DocIDArgName: 
schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filter, updateFilterArgDescription), + "data": schemaTypes.NewArgConfig(gql.String, updateDataArgDescription), }, } return field, nil @@ -1017,9 +1017,9 @@ func (g *Generator) genTypeMutationDeleteField( Description: deleteDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "id": schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), - "ids": schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filter, deleteFilterArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), + "filter": schemaTypes.NewArgConfig(filter, deleteFilterArgDescription), }, } return field, nil @@ -1065,7 +1065,7 @@ func (g *Generator) genTypeFilterArgInput(obj *gql.Object) *gql.InputObject { // generate basic filter operator blocks // @todo: Extract object field loop into its own utility func for f, field := range obj.Fields() { - if _, ok := request.ReservedFields[f]; ok && f != request.KeyFieldName { + if _, ok := request.ReservedFields[f]; ok && f != request.DocIDFieldName { continue } // scalars (leafs) @@ -1169,7 +1169,7 @@ func (g *Generator) genTypeOrderArgInput(obj *gql.Object) *gql.InputObject { fields := gql.InputObjectConfigFieldMap{} for f, field := range obj.Fields() { - if _, ok := request.ReservedFields[f]; ok && f != request.KeyFieldName { + if _, ok := request.ReservedFields[f]; ok && f != request.DocIDFieldName { continue } typeMap := g.manager.schema.TypeMap() @@ -1216,10 +1216,10 @@ func (g *Generator) genTypeQueryableFieldList( Description: obj.Description(), Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ - "dockey": schemaTypes.NewArgConfig(gql.String, dockeyArgDescription), - "dockeys": schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), dockeysArgDescription), - "cid": schemaTypes.NewArgConfig(gql.String, cidArgDescription), - "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), + request.DocIDArgName: schemaTypes.NewArgConfig(gql.String, docIDArgDescription), + request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.NewNonNull(gql.String)), docIDsArgDescription), + "cid": schemaTypes.NewArgConfig(gql.String, cidArgDescription), + "filter": schemaTypes.NewArgConfig(config.filter, selectFilterArgDescription), "groupBy": schemaTypes.NewArgConfig( gql.NewList(gql.NewNonNull(config.groupBy)), schemaTypes.GroupByArgDescription, diff --git a/request/graphql/schema/types/commits.go b/request/graphql/schema/types/commits.go index 4da8d2dd3a..1e8d6b5bb4 100644 --- a/request/graphql/schema/types/commits.go +++ b/request/graphql/schema/types/commits.go @@ -33,7 +33,7 @@ var ( // type Commit { // Height: Int // CID: String - // Dockey: String + // DocID: String // CollectionID: Int // SchemaVersionID: String // Delta: String @@ -55,8 +55,8 @@ var ( Description: commitCIDFieldDescription, Type: gql.String, }, - "dockey": &gql.Field{ - Description: commitDockeyFieldDescription, + request.DocIDArgName: &gql.Field{ + Description: commitDocIDFieldDescription, Type: gql.String, }, "collectionID": &gql.Field{ @@ -125,8 +125,8 @@ var ( Description: commitCIDFieldDescription, Type: OrderingEnum, }, - "dockey": &gql.InputObjectFieldConfig{ - 
Description: commitDockeyFieldDescription, + request.DocIDArgName: &gql.InputObjectFieldConfig{ + Description: commitDocIDFieldDescription, Type: OrderingEnum, }, "collectionID": &gql.InputObjectFieldConfig{ @@ -150,9 +150,9 @@ var ( Value: "cid", Description: commitCIDFieldDescription, }, - "dockey": &gql.EnumValueConfig{ - Value: "dockey", - Description: commitDockeyFieldDescription, + request.DocIDArgName: &gql.EnumValueConfig{ + Value: request.DocIDArgName, + Description: commitDocIDFieldDescription, }, "collectionID": &gql.EnumValueConfig{ Value: "collectionID", @@ -175,10 +175,10 @@ var ( Description: commitsQueryDescription, Type: gql.NewList(CommitObject), Args: gql.FieldConfigArgument{ - "dockey": NewArgConfig(gql.ID, commitDockeyArgDescription), - request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), - "order": NewArgConfig(CommitsOrderArg, OrderArgDescription), - "cid": NewArgConfig(gql.ID, commitCIDArgDescription), + request.DocIDArgName: NewArgConfig(gql.ID, commitDocIDArgDescription), + request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), + "order": NewArgConfig(CommitsOrderArg, OrderArgDescription), + "cid": NewArgConfig(gql.ID, commitCIDArgDescription), "groupBy": NewArgConfig( gql.NewList( gql.NewNonNull( @@ -198,8 +198,8 @@ var ( Description: latestCommitsQueryDescription, Type: gql.NewList(CommitObject), Args: gql.FieldConfigArgument{ - "dockey": NewArgConfig(gql.NewNonNull(gql.ID), commitDockeyArgDescription), - request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), + request.DocIDArgName: NewArgConfig(gql.NewNonNull(gql.ID), commitDocIDArgDescription), + request.FieldIDName: NewArgConfig(gql.String, commitFieldIDArgDescription), }, } ) diff --git a/request/graphql/schema/types/descriptions.go b/request/graphql/schema/types/descriptions.go index b60c9f009d..42c1ba956e 100644 --- a/request/graphql/schema/types/descriptions.go +++ b/request/graphql/schema/types/descriptions.go @@ -38,9 +38,9 @@ Commit represents an individual commit to a MerkleCRDT, every mutation to a commit composed of the field level commits and, in the case of an update, the prior composite commit. ` - commitDockeyArgDescription string = ` -An optional dockey parameter for this commit query. Only commits for a document - with a matching dockey will be returned. If no documents match, the result + commitDocIDArgDescription string = ` +An optional docID parameter for this commit query. Only commits for a document + with a matching docID will be returned. If no documents match, the result set will be empty. ` commitFieldIDArgDescription string = ` @@ -71,8 +71,8 @@ Height represents the location of the commit in the DAG. All commits (composite, The unique CID of this commit, and the primary means through which to safely identify a specific commit. ` - commitDockeyFieldDescription string = ` -The dockey of the document that this commit is for. + commitDocIDFieldDescription string = ` +The docID of the document that this commit is for. ` commitCollectionIDFieldDescription string = ` The ID of the collection that this commit was committed against. 
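As a quick illustration of the commit query changes above, a minimal sketch of the two commit queries with the renamed docID argument (the bae-... value is a placeholder):

query {
	commits(docID: "bae-...") {
		cid
		docID
		height
	}
}

query {
	latestCommits(docID: "bae-...") {
		cid
		docID
	}
}

Per the argument configs above, latestCommits takes a non-null docID, while commits treats docID as an optional filter.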
diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index 0a9127d816..fda850e9a9 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -126,7 +126,7 @@ func BackfillBenchmarkDB( fixture fixtures.Generator, docCount, opCount int, doSync bool, -) ([][]client.DocKey, error) { +) ([][]client.DocID, error) { numTypes := len(fixture.Types()) // load fixtures @@ -134,7 +134,7 @@ func BackfillBenchmarkDB( wg.Add(docCount) errCh := make(chan error) waitCh := make(chan struct{}) - dockeys := make([][]client.DocKey, docCount) + listOfDocIDs := make([][]client.DocID, docCount) go func() { // Cut up the job from into writeBatchGroup size grouped jobs. @@ -159,7 +159,7 @@ func BackfillBenchmarkDB( } // create the documents - keys := make([]client.DocKey, numTypes) + docIDs := make([]client.DocID, numTypes) for j := 0; j < numTypes; j++ { doc, err := client.NewDocFromJSON([]byte(docs[j])) if err != nil { @@ -177,17 +177,17 @@ func BackfillBenchmarkDB( log.Info( ctx, "Failed to commit TX for doc %s, retrying...\n", - logging.NewKV("DocKey", doc.Key()), + logging.NewKV("DocID", doc.ID()), ) continue } else if err != nil { errCh <- errors.Wrap("failed to create document", err) } - keys[j] = doc.Key() + docIDs[j] = doc.ID() break } } - dockeys[index] = keys + listOfDocIDs[index] = docIDs wg.Done() batchWg.Done() @@ -205,7 +205,7 @@ func BackfillBenchmarkDB( // finish or err select { case <-waitCh: - return dockeys, nil + return listOfDocIDs, nil case err := <-errCh: return nil, err } diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go index dfb63fc86b..2ef7123493 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -40,7 +40,7 @@ func runCollectionBenchGet( } defer db.Close() - dockeys, err := benchutils.BackfillBenchmarkDB( + listOfDocIDs, err := benchutils.BackfillBenchmarkDB( b, ctx, collections, @@ -55,9 +55,9 @@ func runCollectionBenchGet( // run benchmark if doSync { - return runCollectionBenchGetSync(b, ctx, collections, fixture, docCount, opCount, dockeys) + return runCollectionBenchGetSync(b, ctx, collections, fixture, docCount, opCount, listOfDocIDs) } - return runCollectionBenchGetAsync(b, ctx, collections, fixture, docCount, opCount, dockeys) + return runCollectionBenchGetAsync(b, ctx, collections, fixture, docCount, opCount, listOfDocIDs) } func runCollectionBenchGetSync(b *testing.B, @@ -65,14 +65,14 @@ func runCollectionBenchGetSync(b *testing.B, collections []client.Collection, fixture fixtures.Generator, docCount, opCount int, - dockeys [][]client.DocKey, + listOfDocIDs [][]client.DocID, ) error { numTypes := len(fixture.Types()) b.ResetTimer() for i := 0; i < b.N; i++ { // outer benchmark loop for j := 0; j < opCount/numTypes; j++ { // number of Get operations we want to execute for k := 0; k < numTypes; k++ { // apply op to all the related types - collections[k].Get(ctx, dockeys[j][k], false) //nolint:errcheck + collections[k].Get(ctx, listOfDocIDs[j][k], false) //nolint:errcheck } } } @@ -88,7 +88,7 @@ func runCollectionBenchGetAsync(b *testing.B, collections []client.Collection, fixture fixtures.Generator, docCount, opCount int, - dockeys [][]client.DocKey, + listOfDocIDs [][]client.DocID, ) error { var wg sync.WaitGroup numTypes := len(fixture.Types()) @@ -97,10 +97,10 @@ func runCollectionBenchGetAsync(b *testing.B, for j := 0; j < opCount/numTypes; j++ { // number of Get operations we want to execute for k := 0; k < numTypes; k++ { // apply op to all the related types wg.Add(1) - go func(ctx 
context.Context, col client.Collection, dockey client.DocKey) { - col.Get(ctx, dockey, false) //nolint:errcheck + go func(ctx context.Context, col client.Collection, docID client.DocID) { + col.Get(ctx, docID, false) //nolint:errcheck wg.Done() - }(ctx, collections[k], dockeys[j][k]) + }(ctx, collections[k], listOfDocIDs[j][k]) } } diff --git a/tests/bench/query/index/simple_test.go b/tests/bench/query/index/simple_test.go index e675086a2a..2f15aff59c 100644 --- a/tests/bench/query/index/simple_test.go +++ b/tests/bench/query/index/simple_test.go @@ -22,7 +22,7 @@ var ( userSimpleWithFilterQuery = ` query { User(filter: { Age: { _eq: 30 } }) { - _key + _docID Name Age Points diff --git a/tests/bench/query/planner/simple_test.go b/tests/bench/query/planner/simple_test.go index e911002911..b6bdedac8e 100644 --- a/tests/bench/query/planner/simple_test.go +++ b/tests/bench/query/planner/simple_test.go @@ -21,7 +21,7 @@ var ( userSimpleQuery = ` query { User { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/simple_test.go b/tests/bench/query/simple/simple_test.go index a9791bcbc7..14f2591d89 100644 --- a/tests/bench/query/simple/simple_test.go +++ b/tests/bench/query/simple/simple_test.go @@ -21,7 +21,7 @@ var ( userSimpleQuery = ` query { User { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/utils.go b/tests/bench/query/simple/utils.go index 8c6f82579b..14752e7ae2 100644 --- a/tests/bench/query/simple/utils.go +++ b/tests/bench/query/simple/utils.go @@ -41,7 +41,7 @@ func RunQueryBenchGet( } defer db.Close() - dockeys, err := benchutils.BackfillBenchmarkDB( + listOfDocIDs, err := benchutils.BackfillBenchmarkDB( b, ctx, collections, @@ -54,7 +54,7 @@ func RunQueryBenchGet( return err } - return runQueryBenchGetSync(b, ctx, db, docCount, dockeys, query) + return runQueryBenchGetSync(b, ctx, db, docCount, listOfDocIDs, query) } func runQueryBenchGetSync( @@ -62,11 +62,11 @@ func runQueryBenchGetSync( ctx context.Context, db client.DB, docCount int, - dockeys [][]client.DocKey, + listOfDocIDs [][]client.DocID, query string, ) error { - // run any preprocessing on the query before execution (mostly just dockey insertion if needed) - query = formatQuery(b, query, dockeys) + // run any preprocessing on the query before execution (mostly just docID insertion if needed) + query = formatQuery(b, query, listOfDocIDs) b.ResetTimer() for i := 0; i < b.N; i++ { @@ -89,37 +89,37 @@ func runQueryBenchGetSync( return nil } -func formatQuery(b *testing.B, query string, dockeys [][]client.DocKey) string { - numPlaceholders := strings.Count(query, "{{dockey}}") +func formatQuery(b *testing.B, query string, listOfDocIDs [][]client.DocID) string { + numPlaceholders := strings.Count(query, "{{docID}}") if numPlaceholders == 0 { return query } - // create a copy of dockeys since we'll be mutating it - dockeysCopy := dockeys[:] + // create a copy of docIDs since we'll be mutating it + docIDsCopy := listOfDocIDs[:] // b.Logf("formatting query, replacing %v instances", numPlaceholders) // b.Logf("Query before: %s", query) - if len(dockeysCopy) < numPlaceholders { + if len(docIDsCopy) < numPlaceholders { b.Fatalf( "Invalid number of query placeholders, max is %v requested is %v", - len(dockeys), + len(listOfDocIDs), numPlaceholders, ) } for i := 0; i < numPlaceholders; i++ { - // pick a random dockey, needs to be unique accross all + // pick a random docID, needs to be unique accross all // loop iterations, so remove the selected one so the next // iteration cant potentially 
pick it. - rIndex := rand.Intn(len(dockeysCopy)) - key := dockeysCopy[rIndex][0] + rIndex := rand.Intn(len(docIDsCopy)) + docID := docIDsCopy[rIndex][0] - // remove selected key - dockeysCopy = append(dockeysCopy[:rIndex], dockeysCopy[rIndex+1:]...) + // remove selected docID + docIDsCopy = append(docIDsCopy[:rIndex], docIDsCopy[rIndex+1:]...) // replace - query = strings.Replace(query, "{{dockey}}", key.String(), 1) + query = strings.Replace(query, "{{docID}}", docID.String(), 1) } // b.Logf("Query After: %s", query) diff --git a/tests/bench/query/simple/with_filter_test.go b/tests/bench/query/simple/with_filter_test.go index 60081167a3..7fbc15989b 100644 --- a/tests/bench/query/simple/with_filter_test.go +++ b/tests/bench/query/simple/with_filter_test.go @@ -21,7 +21,7 @@ var ( userSimpleWithFilterQuery = ` query { User(filter: {Age: {_gt: 10}}) { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/with_limit_offset_test.go b/tests/bench/query/simple/with_limit_offset_test.go index e47d8f347e..d770302cd0 100644 --- a/tests/bench/query/simple/with_limit_offset_test.go +++ b/tests/bench/query/simple/with_limit_offset_test.go @@ -21,7 +21,7 @@ var ( userSimpleWithLimitOffsetQuery = ` query { User(limit: 10, offset: 5) { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/with_multi_lookup_test.go b/tests/bench/query/simple/with_multi_lookup_test.go index 2c744319a3..f862095189 100644 --- a/tests/bench/query/simple/with_multi_lookup_test.go +++ b/tests/bench/query/simple/with_multi_lookup_test.go @@ -18,11 +18,11 @@ import ( ) var ( - // 10x dockey will be replaced in the bench runner func + // 10x `docID`s will be replaced in the bench runner func userSimpleWithMultiLookupQuery = ` query { - User(dockeys: ["{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}", "{{dockey}}"]) { - _key + User(docIDs: ["{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}", "{{docID}}"]) { + _docID Name Age Points diff --git a/tests/bench/query/simple/with_order_test.go b/tests/bench/query/simple/with_order_test.go index 2b12817713..981e4a699d 100644 --- a/tests/bench/query/simple/with_order_test.go +++ b/tests/bench/query/simple/with_order_test.go @@ -21,7 +21,7 @@ var ( userSimpleWithSortQuery = ` query { User(order: {Age: ASC}) { - _key + _docID Name Age Points diff --git a/tests/bench/query/simple/with_single_lookup_test.go b/tests/bench/query/simple/with_single_lookup_test.go index d432f730be..088fbd22d0 100644 --- a/tests/bench/query/simple/with_single_lookup_test.go +++ b/tests/bench/query/simple/with_single_lookup_test.go @@ -18,11 +18,11 @@ import ( ) var ( - // dockey will be replaced in the bench runner func + // The `docID` will be replaced in the bench runner func userSimpleWithSingleLookupQuery = ` query { - User(dockey: "{{dockey}}") { - _key + User(docID: "{{docID}}") { + _docID Name Age Points diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index f29135d201..abef339cfd 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -58,9 +58,9 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name) - // We must call this here, else the doc key on the given object will not match + // We must call this here, else the 
docID on the given object will not match // that of the document saved in the database - err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields) + err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) if err != nil { return err } @@ -84,9 +84,9 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er docMapList := make([]map[string]any, len(docs)) for i, doc := range docs { - // We must call this here, else the doc key on the given object will not match + // We must call this here, else the docID on the given object will not match // that of the document saved in the database - err := doc.RemapAliasFieldsAndDockey(c.Schema().Fields) + err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) if err != nil { return err } @@ -115,7 +115,7 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er func (c *Collection) Update(ctx context.Context, doc *client.Document) error { args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name) - args = append(args, "--key", doc.Key().String()) + args = append(args, "--docID", doc.ID().String()) document, err := doc.ToJSONPatch() if err != nil { @@ -132,7 +132,7 @@ func (c *Collection) Update(ctx context.Context, doc *client.Document) error { } func (c *Collection) Save(ctx context.Context, doc *client.Document) error { - _, err := c.Get(ctx, doc.Key(), true) + _, err := c.Get(ctx, doc.ID(), true) if err == nil { return c.Update(ctx, doc) } @@ -142,16 +142,16 @@ func (c *Collection) Save(ctx context.Context, doc *client.Document) error { return err } -func (c *Collection) Delete(ctx context.Context, docKey client.DocKey) (bool, error) { - res, err := c.DeleteWithKey(ctx, docKey) +func (c *Collection) Delete(ctx context.Context, docID client.DocID) (bool, error) { + res, err := c.DeleteWithDocID(ctx, docID) if err != nil { return false, err } return res.Count == 1, nil } -func (c *Collection) Exists(ctx context.Context, docKey client.DocKey) (bool, error) { - _, err := c.Get(ctx, docKey, false) +func (c *Collection) Exists(ctx context.Context, docID client.DocID) (bool, error) { + _, err := c.Get(ctx, docID, false) if err != nil { return false, err } @@ -162,10 +162,10 @@ func (c *Collection) UpdateWith(ctx context.Context, target any, updater string) switch t := target.(type) { case string, map[string]any, *request.Filter: return c.UpdateWithFilter(ctx, t, updater) - case client.DocKey: - return c.UpdateWithKey(ctx, t, updater) - case []client.DocKey: - return c.UpdateWithKeys(ctx, t, updater) + case client.DocID: + return c.UpdateWithDocID(ctx, t, updater) + case []client.DocID: + return c.UpdateWithDocIDs(ctx, t, updater) default: return nil, client.ErrInvalidUpdateTarget } @@ -204,33 +204,33 @@ func (c *Collection) UpdateWithFilter( return c.updateWith(ctx, args) } -func (c *Collection) UpdateWithKey( +func (c *Collection) UpdateWithDocID( ctx context.Context, - key client.DocKey, + docID client.DocID, updater string, ) (*client.UpdateResult, error) { args := []string{"client", "collection", "update"} args = append(args, "--name", c.Description().Name) - args = append(args, "--key", key.String()) + args = append(args, "--docID", docID.String()) args = append(args, "--updater", updater) return c.updateWith(ctx, args) } -func (c *Collection) UpdateWithKeys( +func (c *Collection) UpdateWithDocIDs( ctx context.Context, - docKeys []client.DocKey, + docIDs []client.DocID, updater string, ) (*client.UpdateResult, error) { args := []string{"client", "collection", 
"update"} args = append(args, "--name", c.Description().Name) args = append(args, "--updater", updater) - keys := make([]string, len(docKeys)) - for i, v := range docKeys { - keys[i] = v.String() + strDocIDs := make([]string, len(docIDs)) + for i, v := range docIDs { + strDocIDs[i] = v.String() } - args = append(args, "--key", strings.Join(keys, ",")) + args = append(args, "--docID", strings.Join(strDocIDs, ",")) return c.updateWith(ctx, args) } @@ -239,10 +239,10 @@ func (c *Collection) DeleteWith(ctx context.Context, target any) (*client.Delete switch t := target.(type) { case string, map[string]any, *request.Filter: return c.DeleteWithFilter(ctx, t) - case client.DocKey: - return c.DeleteWithKey(ctx, t) - case []client.DocKey: - return c.DeleteWithKeys(ctx, t) + case client.DocID: + return c.DeleteWithDocID(ctx, t) + case []client.DocID: + return c.DeleteWithDocIDs(ctx, t) default: return nil, client.ErrInvalidDeleteTarget } @@ -276,31 +276,31 @@ func (c *Collection) DeleteWithFilter(ctx context.Context, filter any) (*client. return c.deleteWith(ctx, args) } -func (c *Collection) DeleteWithKey(ctx context.Context, docKey client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocID(ctx context.Context, docID client.DocID) (*client.DeleteResult, error) { args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name) - args = append(args, "--key", docKey.String()) + args = append(args, "--docID", docID.String()) return c.deleteWith(ctx, args) } -func (c *Collection) DeleteWithKeys(ctx context.Context, docKeys []client.DocKey) (*client.DeleteResult, error) { +func (c *Collection) DeleteWithDocIDs(ctx context.Context, docIDs []client.DocID) (*client.DeleteResult, error) { args := []string{"client", "collection", "delete"} args = append(args, "--name", c.Description().Name) - keys := make([]string, len(docKeys)) - for i, v := range docKeys { - keys[i] = v.String() + strDocIDs := make([]string, len(docIDs)) + for i, v := range docIDs { + strDocIDs[i] = v.String() } - args = append(args, "--key", strings.Join(keys, ",")) + args = append(args, "--docID", strings.Join(strDocIDs, ",")) return c.deleteWith(ctx, args) } -func (c *Collection) Get(ctx context.Context, key client.DocKey, showDeleted bool) (*client.Document, error) { +func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bool) (*client.Document, error) { args := []string{"client", "collection", "get"} args = append(args, "--name", c.Description().Name) - args = append(args, key.String()) + args = append(args, docID.String()) if showDeleted { args = append(args, "--show-deleted") @@ -324,40 +324,40 @@ func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { } } -func (c *Collection) GetAllDocKeys(ctx context.Context) (<-chan client.DocKeysResult, error) { - args := []string{"client", "collection", "keys"} +func (c *Collection) GetAllDocIDs(ctx context.Context) (<-chan client.DocIDResult, error) { + args := []string{"client", "collection", "docIDs"} args = append(args, "--name", c.Description().Name) stdOut, _, err := c.cmd.executeStream(ctx, args) if err != nil { return nil, err } - docKeyCh := make(chan client.DocKeysResult) + docIDCh := make(chan client.DocIDResult) go func() { dec := json.NewDecoder(stdOut) - defer close(docKeyCh) + defer close(docIDCh) for { - var res http.DocKeyResult + var res http.DocIDResult if err := dec.Decode(&res); err != nil { return } - key, err := client.NewDocKeyFromString(res.Key) + docID, err := 
client.NewDocIDFromString(res.DocID) if err != nil { return } - docKey := client.DocKeysResult{ - Key: key, + docIDResult := client.DocIDResult{ + ID: docID, } if res.Error != "" { - docKey.Err = fmt.Errorf(res.Error) + docIDResult.Err = fmt.Errorf(res.Error) } - docKeyCh <- docKey + docIDCh <- docIDResult } }() - return docKeyCh, nil + return docIDCh, nil } func (c *Collection) CreateIndex( diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index 52ea3148e5..c425c8de8f 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -71,10 +71,10 @@ func newRandomDocGenerator(types map[string]client.CollectionDefinition, config } type genDoc struct { - // the dockey of the document. Its cached value from doc.Key().String() just to avoid + // the docID of the document. Its cached value from doc.ID().String() just to avoid // calculating it multiple times. - docKey string - doc *client.Document + docID string + doc *client.Document } type randomDocGenerator struct { @@ -117,10 +117,10 @@ func (g *randomDocGenerator) getMaxTotalDemand() int { return totalDemand } -// getNextPrimaryDocKey returns the key of the next primary document to be used as a relation. -func (g *randomDocGenerator) getNextPrimaryDocKey(secondaryType string, field *client.FieldDescription) string { +// getNextPrimaryDocID returns the docID of the next primary document to be used as a relation. +func (g *randomDocGenerator) getNextPrimaryDocID(secondaryType string, field *client.FieldDescription) string { ind := g.configurator.usageCounter.getNextTypeIndForField(secondaryType, field) - return g.generatedDocs[field.Schema][ind].docKey + return g.generatedDocs[field.Schema][ind].docID } func (g *randomDocGenerator) generateRandomDocs(order []string) error { @@ -134,12 +134,12 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { for i := 0; i < totalDemand; i++ { newDoc := make(map[string]any) for _, field := range typeDef.Schema.Fields { - if field.Name == request.KeyFieldName { + if field.Name == request.DocIDFieldName { continue } if field.IsRelation() { if field.IsPrimaryRelation() { - newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocKey(typeName, &field) + newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocID(typeName, &field) } } else { fieldConf := g.configurator.config.ForField(typeName, field.Name) @@ -151,7 +151,7 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { return err } g.generatedDocs[typeName] = append(g.generatedDocs[typeName], - genDoc{docKey: doc.Key().String(), doc: doc}) + genDoc{docID: doc.ID().String(), doc: doc}) } } return nil diff --git a/tests/gen/gen_auto_configurator.go b/tests/gen/gen_auto_configurator.go index 55a15737ea..4049e7ba4d 100644 --- a/tests/gen/gen_auto_configurator.go +++ b/tests/gen/gen_auto_configurator.go @@ -83,7 +83,7 @@ func (c *typeUsageCounters) addRelationUsage( // getNextTypeIndForField returns the next index to be used for a foreign field. func (c *typeUsageCounters) getNextTypeIndForField(secondaryType string, field *client.FieldDescription) int { current := c.m[field.Schema][secondaryType][field.Name] - return current.useNextDocKey() + return current.useNextDocIDIndex() } type relationUsage struct { @@ -93,9 +93,9 @@ type relationUsage struct { minSecDocsPerPrimary int // maxSecDocsPerPrimary is the maximum number of primary documents that should be used for the relation. 
maxSecDocsPerPrimary int - // docKeysCounter is a slice of structs that keep track of the number of times + // docIDsCounter is a slice of structs that keep track of the number of times // each primary document has been used for the relation. - docKeysCounter []struct { + docIDsCounter []struct { // ind is the index of the primary document. ind int // count is the number of times the primary document has been used for the relation. @@ -116,27 +116,27 @@ func newRelationUsage(minSecDocPerPrim, maxSecDocPerPrim, numDocs int, random *r } } -// useNextDocKey determines the next primary document to be used for the relation, tracks +// useNextDocIDIndex determines the next primary document to be used for the relation, tracks // it and returns its index. -func (u *relationUsage) useNextDocKey() int { - docKeyCounterInd := 0 +func (u *relationUsage) useNextDocIDIndex() int { + docIDCounterInd := 0 // if a primary document has a minimum number of secondary documents that should be // generated for it, then it should be used until that minimum is reached. // After that, we can pick a random primary document to use. if u.counter >= u.minSecDocsPerPrimary*u.numAvailablePrimaryDocs { - docKeyCounterInd = u.random.Intn(len(u.docKeysCounter)) + docIDCounterInd = u.random.Intn(len(u.docIDsCounter)) } else { - docKeyCounterInd = u.counter % len(u.docKeysCounter) + docIDCounterInd = u.counter % len(u.docIDsCounter) } - currentInd := u.docKeysCounter[docKeyCounterInd].ind - docCounter := &u.docKeysCounter[docKeyCounterInd] + currentInd := u.docIDsCounter[docIDCounterInd].ind + docCounter := &u.docIDsCounter[docIDCounterInd] docCounter.count++ // if the primary document reached max number of secondary documents, we can remove it // from the slice of primary documents that are available for the relation. if docCounter.count >= u.maxSecDocsPerPrimary { - lastCounterInd := len(u.docKeysCounter) - 1 - *docCounter = u.docKeysCounter[lastCounterInd] - u.docKeysCounter = u.docKeysCounter[:lastCounterInd] + lastCounterInd := len(u.docIDsCounter) - 1 + *docCounter = u.docIDsCounter[lastCounterInd] + u.docIDsCounter = u.docIDsCounter[:lastCounterInd] } u.counter++ @@ -145,14 +145,14 @@ func (u *relationUsage) useNextDocKey() int { // allocateIndexes allocates the indexes for the relation usage tracker. 
func (u *relationUsage) allocateIndexes() { - docKeysCounter := make([]struct { + docIDsCounter := make([]struct { ind int count int }, u.numAvailablePrimaryDocs) - for i := range docKeysCounter { - docKeysCounter[i].ind = i + for i := range docIDsCounter { + docIDsCounter[i].ind = i } - u.docKeysCounter = docKeysCounter + u.docIDsCounter = docIDsCounter } func newDocGenConfigurator(types map[string]client.CollectionDefinition, config configsMap) docsGenConfigurator { diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index 5e4c62a0a2..a9a8d81136 100644 --- a/tests/gen/gen_auto_test.go +++ b/tests/gen/gen_auto_test.go @@ -60,10 +60,10 @@ func getBooleanField(t *testing.T, doc *client.Document, fieldName string) bool return val } -func getDocKeysFromDocs(docs []*client.Document) []string { +func getDocIDsFromDocs(docs []*client.Document) []string { result := make([]string, 0, len(docs)) for _, doc := range docs { - result = append(result, doc.Key().String()) + result = append(result, doc.ID().String()) } return result } @@ -90,7 +90,7 @@ func removeDuplicateStr(strSlice []string) []string { return list } -func assertDocKeysMatch( +func assertDocIDsMatch( t *testing.T, docs []GeneratedDoc, primaryCol, secondaryCol string, @@ -100,7 +100,7 @@ func assertDocKeysMatch( primaryDocs := filterByCollection(docs, primaryCol) secondaryDocs := filterByCollection(docs, secondaryCol) - docKeys := getDocKeysFromDocs(primaryDocs) + docIDs := getDocIDsFromDocs(primaryDocs) foreignValues := make([]string, 0, len(secondaryDocs)) for _, secDoc := range secondaryDocs { foreignValues = append(foreignValues, getStringField(t, secDoc, foreignField)) @@ -111,7 +111,7 @@ func assertDocKeysMatch( foreignValues = newValues } - assert.ElementsMatch(t, docKeys, foreignValues) + assert.ElementsMatch(t, docIDs, foreignValues) } func assertUniformlyDistributedIntFieldRange(t *testing.T, docs []GeneratedDoc, fieldName string, minVal, maxVal int) { @@ -211,8 +211,8 @@ func assertUniformRelationDistribution( secondaryPerPrimary := make(map[string]int) for _, d := range secondaryCol { - docKey := getStringField(t, d, foreignField) - secondaryPerPrimary[docKey]++ + docID := getStringField(t, d, foreignField) + secondaryPerPrimary[docID]++ } minDocsPerPrimary := math.MaxInt maxDocsPerPrimary := math.MinInt @@ -347,7 +347,7 @@ func TestAutoGenerateFromSchema_RelationOneToOne(t *testing.T) { assert.Len(t, filterByCollection(docs, "User"), numUsers) assert.Len(t, filterByCollection(docs, "Device"), numUsers) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", false) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", false) } func TestAutoGenerateFromSchema_RelationOneToMany(t *testing.T) { @@ -369,7 +369,7 @@ func TestAutoGenerateFromSchema_RelationOneToMany(t *testing.T) { assert.Len(t, filterByCollection(docs, "User"), numUsers) assert.Len(t, filterByCollection(docs, "Device"), numUsers*2) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_RelationOneToManyWithConfiguredNumberOfElements(t *testing.T) { @@ -396,7 +396,7 @@ func TestAutoGenerateFromSchema_RelationOneToManyWithConfiguredNumberOfElements( assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", minDevicesPerUser, maxDevicesPerUser) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func 
TestAutoGenerateFromSchema_RelationOneToManyToOneWithConfiguredNumberOfElements(t *testing.T) { @@ -430,8 +430,8 @@ func TestAutoGenerateFromSchema_RelationOneToManyToOneWithConfiguredNumberOfElem assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) - assertDocKeysMatch(t, docs, "Device", "Specs", "device_id", false) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "Device", "Specs", "device_id", false) } func TestAutoGenerateFromSchema_RelationOneToManyToOnePrimaryWithConfiguredNumberOfElements(t *testing.T) { @@ -465,8 +465,8 @@ func TestAutoGenerateFromSchema_RelationOneToManyToOnePrimaryWithConfiguredNumbe assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", devicesPerUser, devicesPerUser) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) - assertDocKeysMatch(t, docs, "Specs", "Device", "specs_id", false) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "Specs", "Device", "specs_id", false) } func TestAutoGenerateFromSchema_RelationOneToManyToManyWithNumDocsForSecondaryType(t *testing.T) { @@ -510,9 +510,9 @@ func TestAutoGenerateFromSchema_RelationOneToManyToManyWithNumDocsForSecondaryTy assertUniformRelationDistribution(t, docs, "Device", "Specs", "device_id", 1, 1) assertUniformRelationDistribution(t, docs, "Device", "Component", "device_id", componentsPerDevice, componentsPerDevice) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) - assertDocKeysMatch(t, docs, "Device", "Specs", "device_id", false) - assertDocKeysMatch(t, docs, "Device", "Component", "device_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "Device", "Specs", "device_id", false) + assertDocIDsMatch(t, docs, "Device", "Component", "device_id", true) } func TestAutoGenerateFromSchema_DemandsForDifferentRelationTrees(t *testing.T) { @@ -549,7 +549,7 @@ func TestAutoGenerateFromSchema_DemandsForDifferentRelationTrees(t *testing.T) { assertUniformRelationDistribution(t, docs, "Device", "Component", "device_id", componentsPerDevice, componentsPerDevice) - assertDocKeysMatch(t, docs, "Device", "Component", "device_id", true) + assertDocIDsMatch(t, docs, "Device", "Component", "device_id", true) } func TestAutoGenerateFromSchema_IfTypeDemandedForSameTreeAddsUp_ShouldGenerate(t *testing.T) { @@ -638,7 +638,7 @@ func TestAutoGenerateFromSchema_IfDemand2TypesWithOptions_ShouldAdjust(t *testin assert.Len(t, filterByCollection(docs, "User"), numUsers) assert.Len(t, filterByCollection(docs, "Device"), numDevices) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_IfDemand2TypesWithOptionsAndFieldDemand_ShouldAdjust(t *testing.T) { @@ -668,7 +668,7 @@ func TestAutoGenerateFromSchema_IfDemand2TypesWithOptionsAndFieldDemand_ShouldAd assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", 1, 5) - assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_IfDemand2TypesWithRangeOptions_ShouldAdjust(t *testing.T) { @@ -699,7 +699,7 @@ func TestAutoGenerateFromSchema_IfDemand2TypesWithRangeOptions_ShouldAdjust(t *t assertUniformRelationDistribution(t, docs, "User", "Device", "owner_id", 1, 5) - 
assertDocKeysMatch(t, docs, "User", "Device", "owner_id", true) + assertDocIDsMatch(t, docs, "User", "Device", "owner_id", true) } func TestAutoGenerateFromSchema_ConfigThatCanNotBySupplied(t *testing.T) { diff --git a/tests/integration/backup/one_to_many/export_test.go b/tests/integration/backup/one_to_many/export_test.go index 328d48bd6d..3626535b9d 100644 --- a/tests/integration/backup/one_to_many/export_test.go +++ b/tests/integration/backup/one_to_many/export_test.go @@ -28,7 +28,7 @@ func TestBackupExport_JustUserCollection_NoError(t *testing.T) { Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -57,7 +57,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing. Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -90,7 +90,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndMultipleDocUpdate_NoError(t * Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-4399f189-138d-5d49-9e25-82e78463677b","_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of chains"},{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b","_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Game of 
chains"},{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, }, }, } diff --git a/tests/integration/backup/one_to_many/import_test.go b/tests/integration/backup/one_to_many/import_test.go index f3c189365d..193867cc8d 100644 --- a/tests/integration/backup/one_to_many/import_test.go +++ b/tests/integration/backup/one_to_many/import_test.go @@ -84,28 +84,28 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr ImportContent: `{ "Book":[ { - "_key":"bae-4399f189-138d-5d49-9e25-82e78463677b", - "_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", + "_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b", + "_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"Game of chains" }, { - "_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", - "_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", + "_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", + "_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"John and the sourcerers' stone" } ], "User":[ { - "_key":"bae-0648f44e-74e8-593b-a662-3310ec278927", - "_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927", "age":31, "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", + "_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "age":31, "name":"John" } @@ -137,7 +137,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr Book { name author { - _key + _docID } } }`, @@ -145,13 +145,13 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr { "name": "Game of chains", "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", }, }, { "name": "John and the sourcerers' stone", "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", }, }, }, diff --git a/tests/integration/backup/one_to_one/export_test.go b/tests/integration/backup/one_to_one/export_test.go index c5bb798643..4ae32cbebc 100644 --- a/tests/integration/backup/one_to_one/export_test.go +++ b/tests/integration/backup/one_to_one/export_test.go @@ -28,7 +28,7 @@ func TestBackupExport_JustUserCollection_NoError(t *testing.T) { Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -57,7 +57,7 @@ func TestBackupExport_AllCollectionsMultipleDocsAndDocUpdate_NoError(t *testing. 
Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da","_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -101,7 +101,7 @@ func TestBackupExport_DoubleReletionship_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_newKey":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_docIDNew":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"}]}`, }, }, } @@ -149,7 +149,7 @@ func TestBackupExport_DoubleReletionshipWithUpdate_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_key":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_newKey":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"},{"_key":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_newKey":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}]}`, + ExpectedContent: 
`{"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}],"Book":[{"_docID":"bae-45b1def4-4e63-5a93-a1b8-f7b08e682164","_docIDNew":"bae-add2ccfe-84a1-519c-ab7d-c54b43909532","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-0648f44e-74e8-593b-a662-3310ec278927","name":"John and the sourcerers' stone"},{"_docID":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_docIDNew":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}]}`, }, }, } diff --git a/tests/integration/backup/one_to_one/import_test.go b/tests/integration/backup/one_to_one/import_test.go index f827c81670..5405dd4225 100644 --- a/tests/integration/backup/one_to_one/import_test.go +++ b/tests/integration/backup/one_to_one/import_test.go @@ -84,22 +84,22 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr ImportContent: `{ "Book":[ { - "_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", - "_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", + "_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", + "_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"John and the sourcerers' stone" } ], "User":[ { - "_key":"bae-0648f44e-74e8-593b-a662-3310ec278927", - "_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927", "age":31, "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", + "_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "age":31, "name":"John" } @@ -131,7 +131,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr Book { name author { - _key + _docID } } }`, @@ -139,7 +139,7 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndUpdatedDocs_NoEr { "name": "John and the sourcerers' stone", "author": map[string]any{ - "_key": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID": "bae-807ea028-6c13-5f86-a72b-46e8b715a162", }, }, }, @@ -157,28 +157,28 @@ func TestBackupImport_WithMultipleNoKeyAndMultipleCollectionsAndMultipleUpdatedD ImportContent: `{ "Book":[ { - "_key":"bae-4399f189-138d-5d49-9e25-82e78463677b", - "_newKey":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", + "_docID":"bae-4399f189-138d-5d49-9e25-82e78463677b", + "_docIDNew":"bae-78a40f28-a4b8-5dca-be44-392b0f96d0ff", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"Game of chains" }, { - "_key":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", - "_newKey":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", + "_docID":"bae-5cf2fec3-d8ed-50d5-8286-39109853d2da", + "_docIDNew":"bae-edeade01-2d21-5d6d-aadf-efc5a5279de5", "author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "name":"John and the sourcerers' stone" } ], "User":[ { - "_key":"bae-0648f44e-74e8-593b-a662-3310ec278927", - "_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927", + "_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927", "age":31, "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", - "_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", + 
"_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162", "age":31, "name":"John" } @@ -211,7 +211,7 @@ func TestBackupImport_DoubleRelationshipWithUpdate_NoError(t *testing.T) { `, }, testUtils.BackupImport{ - ImportContent: `{"Book":[{"_key":"bae-236c14bd-4621-5d43-bc03-4442f3b8719e","_newKey":"bae-6dbb3738-d3db-5121-acee-6fbdd97ff7a8","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"},{"_key":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_newKey":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}],"User":[{"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927","_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, + ImportContent: `{"Book":[{"_docID":"bae-236c14bd-4621-5d43-bc03-4442f3b8719e","_docIDNew":"bae-6dbb3738-d3db-5121-acee-6fbdd97ff7a8","author_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","favourite_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"John and the sourcerers' stone"},{"_docID":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","_docIDNew":"bae-da7f2d88-05c4-528a-846a-0d18ab26603b","name":"Game of chains"}],"User":[{"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927","_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927","age":31,"name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, }, testUtils.Request{ Request: ` diff --git a/tests/integration/backup/self_reference/export_test.go b/tests/integration/backup/self_reference/export_test.go index e0d0c606cf..9a0c73a8d1 100644 --- a/tests/integration/backup/self_reference/export_test.go +++ b/tests/integration/backup/self_reference/export_test.go @@ -32,7 +32,7 @@ func TestBackupExport_Simple_NoError(t *testing.T) { Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_newKey":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","age":31,"boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519","name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_docIDNew":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","age":31,"boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519","name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -57,7 +57,7 @@ func TestBackupExport_MultipleDocsAndDocUpdate_NoError(t *testing.T) { Doc: `{"age": 31}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_newKey":"bae-067fd15e-32a1-5681-8f41-c423f563e21b","age":31,"boss_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Bob"},{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, + ExpectedContent: 
`{"User":[{"_docID":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d","_docIDNew":"bae-067fd15e-32a1-5681-8f41-c423f563e21b","age":31,"boss_id":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","name":"Bob"},{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-807ea028-6c13-5f86-a72b-46e8b715a162","age":31,"name":"John"}]}`, }, }, } diff --git a/tests/integration/backup/self_reference/import_test.go b/tests/integration/backup/self_reference/import_test.go index 71c44361a1..0a68a66d85 100644 --- a/tests/integration/backup/self_reference/import_test.go +++ b/tests/integration/backup/self_reference/import_test.go @@ -25,13 +25,13 @@ func TestBackupSelfRefImport_Simple_NoError(t *testing.T) { ImportContent: `{ "User":[ { - "_key":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d", + "_docID":"bae-790e7e49-f2e3-5ad6-83d9-5dfb6d8ba81d", "age":31, "boss_id":"bae-e933420a-988a-56f8-8952-6c245aebd519", "name":"Bob" }, { - "_key":"bae-e933420a-988a-56f8-8952-6c245aebd519", + "_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519", "age":30, "name":"John" } @@ -71,8 +71,8 @@ func TestBackupSelfRefImport_SelfRef_NoError(t *testing.T) { expectedExportData := `{` + `"User":[` + `{` + - `"_key":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + - `"_newKey":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + + `"_docID":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + + `"_docIDNew":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + `"age":31,` + `"boss_id":"bae-0648f44e-74e8-593b-a662-3310ec278927",` + `"name":"Bob"` + @@ -269,16 +269,16 @@ func TestBackupSelfRefImport_SplitPrimaryRelationWithSecondCollection_NoError(t expectedExportData := `{` + `"Author":[` + `{` + - `"_key":"bae-d760e445-22ef-5956-9947-26de226891f6",` + - `"_newKey":"bae-e3a6ff01-33ff-55f4-88f9-d13db26274c8",` + + `"_docID":"bae-d760e445-22ef-5956-9947-26de226891f6",` + + `"_docIDNew":"bae-e3a6ff01-33ff-55f4-88f9-d13db26274c8",` + `"book_id":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + `"name":"John"` + `}` + `],` + `"Book":[` + `{` + - `"_key":"bae-4059cb15-2b30-5049-b0df-64cc7ad9b5e4",` + - `"_newKey":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + + `"_docID":"bae-4059cb15-2b30-5049-b0df-64cc7ad9b5e4",` + + `"_docIDNew":"bae-c821a0a9-7afc-583b-accb-dc99a09c1ff8",` + `"name":"John and the sourcerers' stone",` + `"reviewedBy_id":"bae-e3a6ff01-33ff-55f4-88f9-d13db26274c8"` + `}` + diff --git a/tests/integration/backup/simple/export_test.go b/tests/integration/backup/simple/export_test.go index 7ee2e65bd5..d7397b13cb 100644 --- a/tests/integration/backup/simple/export_test.go +++ b/tests/integration/backup/simple/export_test.go @@ -25,7 +25,7 @@ func TestBackupExport_Simple_NoError(t *testing.T) { Doc: `{"name": "John", "age": 30}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } @@ -41,7 +41,7 @@ func TestBackupExport_Empty_NoError(t *testing.T) { Doc: `{}`, }, testUtils.BackupExport{ - ExpectedContent: `{"User":[{"_key":"bae-524bfa06-849c-5daf-b6df-05c2da80844d","_newKey":"bae-524bfa06-849c-5daf-b6df-05c2da80844d"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-524bfa06-849c-5daf-b6df-05c2da80844d","_docIDNew":"bae-524bfa06-849c-5daf-b6df-05c2da80844d"}]}`, }, }, } @@ -98,7 +98,7 @@ func TestBackupExport_JustUserCollection_NoError(t *testing.T) { 
Config: client.BackupConfig{ Collections: []string{"User"}, }, - ExpectedContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, }, } diff --git a/tests/integration/backup/simple/import_test.go b/tests/integration/backup/simple/import_test.go index a53760fa3e..cdfadc6e61 100644 --- a/tests/integration/backup/simple/import_test.go +++ b/tests/integration/backup/simple/import_test.go @@ -20,7 +20,7 @@ func TestBackupImport_Simple_NoError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.BackupImport{ - ImportContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ImportContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, }, testUtils.Request{ Request: ` @@ -60,7 +60,7 @@ func TestBackupImport_WithInvalidCollection_ReturnError(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.BackupImport{ - ImportContent: `{"Invalid":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ImportContent: `{"Invalid":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, ExpectedError: "failed to get collection: datastore: key not found. Name: Invalid", }, }, @@ -77,8 +77,8 @@ func TestBackupImport_WithDocAlreadyExists_ReturnError(t *testing.T) { Doc: `{"name": "John", "age": 30}`, }, testUtils.BackupImport{ - ImportContent: `{"User":[{"_key":"bae-e933420a-988a-56f8-8952-6c245aebd519","_newKey":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, - ExpectedError: "a document with the given dockey already exists", + ImportContent: `{"User":[{"_docID":"bae-e933420a-988a-56f8-8952-6c245aebd519","_docIDNew":"bae-e933420a-988a-56f8-8952-6c245aebd519","age":30,"name":"John"}]}`, + ExpectedError: "a document with the given ID already exists", }, }, } diff --git a/tests/integration/collection/update/simple/with_key_test.go b/tests/integration/collection/update/simple/with_doc_id_test.go similarity index 81% rename from tests/integration/collection/update/simple/with_key_test.go rename to tests/integration/collection/update/simple/with_doc_id_test.go index b3a269271f..228438b58b 100644 --- a/tests/integration/collection/update/simple/with_key_test.go +++ b/tests/integration/collection/update/simple/with_doc_id_test.go @@ -20,7 +20,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) -func TestUpdateWithKey(t *testing.T) { +func TestUpdateWithDocID(t *testing.T) { docStr := `{ "name": "John", "age": 21 @@ -33,7 +33,7 @@ func TestUpdateWithKey(t *testing.T) { tests := []testUtils.TestCase{ { - Description: "Test update users with key and invalid JSON", + Description: "Test update users with docID and invalid JSON", Docs: map[string][]string{ "Users": {docStr}, }, @@ -41,7 +41,7 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `{ + _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ 
name: "Eric" }`) return err }, }, }, ExpectedError: "cannot parse JSON: cannot parse object", }, { - Description: "Test update users with key and invalid updator", + Description: "Test update users with docID and invalid updater", Docs: map[string][]string{ "Users": {docStr}, }, @@ -58,14 +58,14 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `"name: Eric"`) + _, err := c.UpdateWithDocID(ctx, doc.ID(), `"name: Eric"`) return err }, }, }, ExpectedError: "the updater of a document is of invalid type", }, { - Description: "Test update users with key and patch updator (not implemented so no change)", + Description: "Test update users with docID and patch updater (not implemented so no change)", Docs: map[string][]string{ "Users": {docStr}, }, @@ -73,7 +73,7 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `[ + _, err := c.UpdateWithDocID(ctx, doc.ID(), `[ { "name": "Eric" }, { @@ -84,7 +84,7 @@ func TestUpdateWithKey(t *testing.T) { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } @@ -101,7 +101,7 @@ func TestUpdateWithKey(t *testing.T) { }, }, }, { - Description: "Test update users with key", + Description: "Test update users with docID", Docs: map[string][]string{ "Users": {docStr}, }, @@ -109,14 +109,14 @@ func TestUpdateWithKey(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKey(ctx, doc.Key(), `{ + _, err := c.UpdateWithDocID(ctx, doc.ID(), `{ "name": "Eric" }`) if err != nil { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_keys_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go similarity index 79% rename from tests/integration/collection/update/simple/with_keys_test.go rename to tests/integration/collection/update/simple/with_doc_ids_test.go index d36e140852..f32818db39 100644 --- a/tests/integration/collection/update/simple/with_keys_test.go +++ b/tests/integration/collection/update/simple/with_doc_ids_test.go @@ -20,7 +20,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" ) -func TestUpdateWithKeys(t *testing.T) { +func TestUpdateWithDocIDs(t *testing.T) { docStr1 := `{ "name": "John", "age": 21 @@ -43,7 +43,7 @@ func TestUpdateWithKeys(t *testing.T) { tests := []testUtils.TestCase{ { - Description: "Test update users with keys and invalid JSON", + Description: "Test update users with docIDs and invalid JSON", Docs: map[string][]string{ "Users": { docStr1, @@ -54,7 +54,7 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `{ + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ name: "Eric" }`) return err }, }, }, ExpectedError: "cannot parse JSON: cannot parse object", }, { - Description: "Test update users with keys and invalid updator", +
Description: "Test update users with docIDs and invalid updater", Docs: map[string][]string{ "Users": { docStr1, @@ -74,14 +74,14 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `"name: Eric"`) + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `"name: Eric"`) return err }, }, }, ExpectedError: "the updater of a document is of invalid type", }, { - Description: "Test update users with keys and patch updator (not implemented so no change)", + Description: "Test update users with docIDs and patch updater (not implemented so no change)", Docs: map[string][]string{ "Users": { docStr1, @@ -92,7 +92,7 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `[ + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `[ { "name": "Eric" }, { @@ -103,7 +103,7 @@ func TestUpdateWithKeys(t *testing.T) { return err } - d, err := c.Get(ctx, doc1.Key(), false) + d, err := c.Get(ctx, doc1.ID(), false) if err != nil { return err } @@ -115,7 +115,7 @@ func TestUpdateWithKeys(t *testing.T) { assert.Equal(t, "John", name) - d2, err := c.Get(ctx, doc2.Key(), false) + d2, err := c.Get(ctx, doc2.ID(), false) if err != nil { return err } @@ -132,7 +132,7 @@ func TestUpdateWithKeys(t *testing.T) { }, }, }, { - Description: "Test update users with keys", + Description: "Test update users with docIDs", Docs: map[string][]string{ "Users": { docStr1, @@ -143,14 +143,14 @@ func TestUpdateWithKeys(t *testing.T) { "Users": []func(c client.Collection) error{ func(c client.Collection) error { ctx := context.Background() - _, err := c.UpdateWithKeys(ctx, []client.DocKey{doc1.Key(), doc2.Key()}, `{ + _, err := c.UpdateWithDocIDs(ctx, []client.DocID{doc1.ID(), doc2.ID()}, `{ "age": 40 }`) if err != nil { return err } - d, err := c.Get(ctx, doc1.Key(), false) + d, err := c.Get(ctx, doc1.ID(), false) if err != nil { return err } @@ -162,7 +162,7 @@ func TestUpdateWithKeys(t *testing.T) { assert.Equal(t, int64(40), name) - d2, err := c.Get(ctx, doc2.Key(), false) + d2, err := c.Get(ctx, doc2.ID(), false) if err != nil { return err } diff --git a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index 7d04c0ec04..de2d24f8e2 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -130,7 +130,7 @@ func TestUpdateWithFilter(t *testing.T) { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } @@ -162,7 +162,7 @@ func TestUpdateWithFilter(t *testing.T) { return err } - d, err := c.Get(ctx, doc.Key(), false) + d, err := c.Get(ctx, doc.ID(), false) if err != nil { return err } diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index d93aafc31a..0c780c8fde 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -30,7 +30,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { ), ) assert.Nil(t, err) - docKey1 := doc1.Key().String() + docID1 := doc1.ID().String() doc2, err :=
client.NewDocFromJSON( []byte( @@ -40,7 +40,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { ), ) assert.Nil(t, err) - docKey2 := doc2.Key().String() + docID2 := doc2.ID().String() test := testUtils.TestCase{ CollectionCalls: map[string][]func(client.Collection){ @@ -57,10 +57,10 @@ func TestEventsSimpleWithCreate(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some(docKey1), + DocID: immutable.Some(docID1), }, { - DocKey: immutable.Some(docKey2), + DocID: immutable.Some(docID2), }, }, } diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index 40b9cef428..962a16e39a 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -29,7 +29,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { ctx, `mutation { create_Users(data: "{\"name\": \"John\"}") { - _key + _docID } }`, ) @@ -44,7 +44,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { ctx, `mutation { create_Users(data: "{\"name\": \"Shahzad\"}") { - _key + _docID } }`, ) @@ -56,7 +56,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some("bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"), + DocID: immutable.Some("bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"), }, // No event should be received for Shahzad, as the transaction was discarded. }, diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index f84e42f672..df811cd648 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -30,7 +30,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { ), ) assert.Nil(t, err) - docKey1 := doc1.Key().String() + docID1 := doc1.ID().String() test := testUtils.TestCase{ CollectionCalls: map[string][]func(client.Collection){ @@ -40,7 +40,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { assert.Nil(t, err) }, func(c client.Collection) { - wasDeleted, err := c.Delete(context.Background(), doc1.Key()) + wasDeleted, err := c.Delete(context.Background(), doc1.ID()) assert.Nil(t, err) assert.True(t, wasDeleted) }, @@ -48,10 +48,10 @@ func TestEventsSimpleWithDelete(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some(docKey1), + DocID: immutable.Some(docID1), }, { - DocKey: immutable.Some(docKey1), + DocID: immutable.Some(docID1), }, }, } diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index f496678e28..30b8cab9a4 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -30,7 +30,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ), ) assert.Nil(t, err) - docKey1 := doc1.Key().String() + docID1 := doc1.ID().String() doc2, err := client.NewDocFromJSON( []byte( @@ -40,7 +40,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ), ) assert.Nil(t, err) - docKey2 := doc2.Key().String() + docID2 := doc2.ID().String() test := testUtils.TestCase{ CollectionCalls: map[string][]func(client.Collection){ @@ -63,15 +63,15 @@ func TestEventsSimpleWithUpdate(t *testing.T) { }, ExpectedUpdates: []testUtils.ExpectedUpdate{ { - DocKey: immutable.Some(docKey1), - Cid: immutable.Some("bafybeifwfw3g4q6tagffdwq4orrouoosdlsc5rb67q2uj7oplkq7ax5ysm"), + DocID: 
immutable.Some(docID1), + Cid: immutable.Some("bafybeicbv34oa4hfcnqbka3jqnby4g75ttlj4wfvc7zhvat5xca45ggq2u"), }, { - DocKey: immutable.Some(docKey2), + DocID: immutable.Some(docID2), }, { - DocKey: immutable.Some(docKey1), - Cid: immutable.Some("bafybeihdhik6m5o7cxei7f7ie6lnnbwnjsn42ne6cxab6g7dgi7k2uiiu4"), + DocID: immutable.Some(docID1), + Cid: immutable.Some("bafybeiep6f7sls7z325oqd5oddigxq3fkxwpp5b7um47yz5erxfybjd6ra"), }, }, } diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index c461ed5cc3..30b65bc189 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -54,7 +54,7 @@ type TestCase struct { // are `None` the Update event will still be expected and will contribute // to the asserted count. type ExpectedUpdate struct { - DocKey immutable.Option[string] + DocID immutable.Option[string] // The expected Cid, as a string (results in much more readable errors) Cid immutable.Option[string] SchemaRoot immutable.Option[string] @@ -96,7 +96,7 @@ func ExecuteRequestTestCase( expectedEvent := testCase.ExpectedUpdates[indexOfNextExpectedUpdate] assertIfExpected(t, expectedEvent.Cid, update.Cid.String()) - assertIfExpected(t, expectedEvent.DocKey, update.DocKey) + assertIfExpected(t, expectedEvent.DocID, update.DocID) assertIfExpected(t, expectedEvent.Priority, update.Priority) assertIfExpected(t, expectedEvent.SchemaRoot, update.SchemaRoot) diff --git a/tests/integration/explain/debug/dagscan_test.go b/tests/integration/explain/debug/dagscan_test.go index 647d378907..010f866dd7 100644 --- a/tests/integration/explain/debug/dagscan_test.go +++ b/tests/integration/explain/debug/dagscan_test.go @@ -38,7 +38,7 @@ func TestDebugExplainCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { links { cid } @@ -56,7 +56,7 @@ func TestDebugExplainCommitsDagScanQueryOp(t *testing.T) { func TestDebugExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) commits query-op with only dockey (no field).", + Description: "Explain (debug) commits query-op with only docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -64,7 +64,7 @@ func TestDebugExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { links { cid } @@ -90,7 +90,7 @@ func TestDebugExplainLatestCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { cid links { cid @@ -109,7 +109,7 @@ func TestDebugExplainLatestCommitsDagScanQueryOp(t *testing.T) { func TestDebugExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) latestCommits query-op with only dockey (no field).", + Description: "Explain (debug) latestCommits query-op with only docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -117,7 +117,7 @@ func TestDebugExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ 
Request: `query @explain(type: debug) { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { cid links { cid @@ -133,10 +133,10 @@ func TestDebugExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { +func TestDebugExplainLatestCommitsDagScanWithoutDocID_Failure(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) latestCommits query without DocKey.", + Description: "Explain (debug) latestCommits query without docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -152,7 +152,7 @@ func TestDebugExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } @@ -179,7 +179,7 @@ func TestDebugExplainLatestCommitsDagScanWithoutAnyArguments_Failure(t *testing. } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } diff --git a/tests/integration/explain/debug/delete_test.go b/tests/integration/explain/debug/delete_test.go index 083c6163c2..88159152e0 100644 --- a/tests/integration/explain/debug/delete_test.go +++ b/tests/integration/explain/debug/delete_test.go @@ -41,7 +41,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { Request: `mutation @explain(type: debug) { delete_Author(filter: {name: {_eq: "Shahzad"}}) { - _key + _docID } }`, @@ -65,7 +65,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t *te Request: `mutation @explain(type: debug) { delete_Author(filter: {}) { - DeletedKeyByFilter: _key + DeletedKeyByFilter: _docID } }`, @@ -80,7 +80,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t *te func TestDebugExplainMutationRequestWithDeleteUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) mutation request with delete using id.", + Description: "Explain (debug) mutation request with delete using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -88,8 +88,8 @@ func TestDebugExplainMutationRequestWithDeleteUsingId(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(id: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { - _key + delete_Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + _docID } }`, @@ -112,11 +112,11 @@ func TestDebugExplainMutationRequestWithDeleteUsingIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(ids: [ + delete_Author(docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ]) { - AliasKey: _key + AliasKey: _docID } }`, @@ -139,8 +139,8 @@ func TestDebugExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - delete_Author(ids: []) { - _key + delete_Author(docIDs: []) { + _docID } }`, @@ -164,7 +164,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) { Request: `mutation @explain(type: debug) { delete_Author( - ids: 
["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], filter: { _and: [ {age: {_lt: 26}}, @@ -172,7 +172,7 @@ func TestDebugExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) { ] } ) { - _key + _docID } }`, diff --git a/tests/integration/explain/debug/delete_with_error_test.go b/tests/integration/explain/debug/delete_with_error_test.go index 208106e098..a7c5fc1d3a 100644 --- a/tests/integration/explain/debug/delete_with_error_test.go +++ b/tests/integration/explain/debug/delete_with_error_test.go @@ -29,7 +29,7 @@ func TestDebugExplainMutationRequestWithDeleteHavingNoSubSelection(t *testing.T) Request: `mutation @explain(type: debug) { delete_Author( - ids: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/debug/group_with_dockey_child_test.go b/tests/integration/explain/debug/group_with_doc_id_child_test.go similarity index 80% rename from tests/integration/explain/debug/group_with_dockey_child_test.go rename to tests/integration/explain/debug/group_with_doc_id_child_test.go index bc6555b961..43301f1fac 100644 --- a/tests/integration/explain/debug/group_with_dockey_child_test.go +++ b/tests/integration/explain/debug/group_with_doc_id_child_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { +func TestDebugExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys on inner _group.", + Description: "Explain (debug) request with docIDs on inner _group.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -32,7 +32,7 @@ func TestDebugExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { groupBy: [age] ) { age - _group(dockeys: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { + _group(docIDs: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { name } } diff --git a/tests/integration/explain/debug/group_with_dockey_test.go b/tests/integration/explain/debug/group_with_doc_id_test.go similarity index 78% rename from tests/integration/explain/debug/group_with_dockey_test.go rename to tests/integration/explain/debug/group_with_doc_id_test.go index fc53731c6a..ebbfbdb3c9 100644 --- a/tests/integration/explain/debug/group_with_dockey_test.go +++ b/tests/integration/explain/debug/group_with_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { +func TestDebugExplainRequestWithDocIDOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with a dockey on parent groupBy.", + Description: "Explain (debug) request with a document ID on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -30,7 +30,7 @@ func TestDebugExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { Request: `query @explain(type: debug) { Author( groupBy: [age], - dockey: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" + docID: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" ) { age _group { @@ -47,10 +47,10 @@ func TestDebugExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) { +func 
TestDebugExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys and filter on parent groupBy.", + Description: "Explain (debug) request with document IDs and filter on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -61,7 +61,7 @@ func TestDebugExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) { Author( groupBy: [age], filter: {age: {_eq: 20}}, - dockeys: [ + docIDs: [ "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254", "bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed" ] diff --git a/tests/integration/explain/debug/type_join_with_filter_and_key_test.go b/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go similarity index 90% rename from tests/integration/explain/debug/type_join_with_filter_and_key_test.go rename to tests/integration/explain/debug/type_join_with_filter_doc_id_test.go index 5219c5c874..5a8f2c5ba2 100644 --- a/tests/integration/explain/debug/type_join_with_filter_and_key_test.go +++ b/tests/integration/explain/debug/type_join_with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { +func TestDebugExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with related and regular filter + keys.", + Description: "Explain (debug) request with related and regular filter + docIDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -33,7 +33,7 @@ func TestDebugExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { name: {_eq: "John Grisham"}, books: {name: {_eq: "Painted House"}} }, - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e" ] @@ -63,10 +63,10 @@ func TestDebugExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { +func TestDebugExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with many related filters + key.", + Description: "Explain (debug) request with many related filters + docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -80,7 +80,7 @@ func TestDebugExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { articles: {name: {_eq: "To my dear readers"}}, books: {name: {_eq: "Theif Lord"}} }, - dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] ) { name age diff --git a/tests/integration/explain/debug/update_test.go b/tests/integration/explain/debug/update_test.go index 8a479837d6..8c8ed82f0b 100644 --- a/tests/integration/explain/debug/update_test.go +++ b/tests/integration/explain/debug/update_test.go @@ -48,7 +48,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) { }, data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -74,13 +74,13 @@ func TestDebugExplainMutationRequestWithUpdateUsingIds(t *testing.T) { Request: `mutation @explain(type: debug) { update_Author( - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -97,7 +97,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingIds(t *testing.T) { func 
TestDebugExplainMutationRequestWithUpdateUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) mutation request with update using id.", + Description: "Explain (debug) mutation request with update using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -106,10 +106,10 @@ func TestDebugExplainMutationRequestWithUpdateUsingId(t *testing.T) { Request: `mutation @explain(type: debug) { update_Author( - id: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", + docID: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -140,13 +140,13 @@ func TestDebugExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) { _eq: true } }, - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } diff --git a/tests/integration/explain/debug/with_filter_key_test.go b/tests/integration/explain/debug/with_filter_doc_id_test.go similarity index 72% rename from tests/integration/explain/debug/with_filter_key_test.go rename to tests/integration/explain/debug/with_filter_doc_id_test.go index 5ca0939150..89bf3f35aa 100644 --- a/tests/integration/explain/debug/with_filter_key_test.go +++ b/tests/integration/explain/debug/with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDebugExplainRequestWithDocKeyFilter(t *testing.T) { +func TestDebugExplainRequestWithDocIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockey filter.", + Description: "Explain (debug) request with docID filter.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -28,7 +28,7 @@ func TestDebugExplainRequestWithDocKeyFilter(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - Author(dockey: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { name age } @@ -42,10 +42,10 @@ func TestDebugExplainRequestWithDocKeyFilter(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { +func TestDebugExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys filter using one key.", + Description: "Explain (debug) request with docIDs filter using one ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -53,7 +53,7 @@ func TestDebugExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: debug) { - Author(dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { + Author(docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { name age } @@ -67,10 +67,10 @@ func TestDebugExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *testing.T) { +func TestDebugExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys filter using multiple but duplicate keys.", + Description: "Explain (debug) request with docIDs filter using multiple but duplicate IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -79,7 +79,7 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *te Request: `query 
@explain(type: debug) { Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ] @@ -97,10 +97,10 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *te explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing.T) { +func TestDebugExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with dockeys filter using multiple unique keys.", + Description: "Explain (debug) request with docIDs filter using multiple unique IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -109,7 +109,7 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing. Request: `query @explain(type: debug) { Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] @@ -127,10 +127,10 @@ func TestDebugExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing. explainUtils.ExecuteTestCase(t, test) } -func TestDebugExplainRequestWithMatchingKeyFilter(t *testing.T) { +func TestDebugExplainRequestWithMatchingIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (debug) request with a filter to match key.", + Description: "Explain (debug) request with a filter to match ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -140,7 +140,7 @@ func TestDebugExplainRequestWithMatchingKeyFilter(t *testing.T) { Request: `query @explain(type: debug) { Author( filter: { - _key: { + _docID: { _eq: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" } } diff --git a/tests/integration/explain/debug/with_sum_join_test.go b/tests/integration/explain/debug/with_sum_join_test.go index 66c68be8ed..d098eec2f1 100644 --- a/tests/integration/explain/debug/with_sum_join_test.go +++ b/tests/integration/explain/debug/with_sum_join_test.go @@ -44,7 +44,7 @@ func TestDebugExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) { Request: `query @explain(type: debug) { Author { name - _key + _docID TotalPages: _sum( books: {field: pages} ) diff --git a/tests/integration/explain/default/basic_test.go b/tests/integration/explain/default/basic_test.go index 2d7f515d9e..30a5810de6 100644 --- a/tests/integration/explain/default/basic_test.go +++ b/tests/integration/explain/default/basic_test.go @@ -64,7 +64,7 @@ func TestDefaultExplainRequestWithFullBasicGraph(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/dagscan_test.go b/tests/integration/explain/default/dagscan_test.go index a83402bb67..c18f365f82 100644 --- a/tests/integration/explain/default/dagscan_test.go +++ b/tests/integration/explain/default/dagscan_test.go @@ -38,7 +38,7 @@ func TestDefaultExplainCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { links { cid } @@ -73,7 +73,7 @@ func TestDefaultExplainCommitsDagScanQueryOp(t *testing.T) { func TestDefaultExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) commits query-op with only dockey (no field).", + Description: "Explain (default) commits query-op with only 
docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -81,7 +81,7 @@ func TestDefaultExplainCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - commits (dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + commits (docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { links { cid } @@ -124,7 +124,7 @@ func TestDefaultExplainLatestCommitsDagScanQueryOp(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", fieldId: "1") { cid links { cid @@ -160,7 +160,7 @@ func TestDefaultExplainLatestCommitsDagScanQueryOp(t *testing.T) { func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) latestCommits query-op with only dockey (no field).", + Description: "Explain (default) latestCommits query-op with only docID (no field).", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -168,7 +168,7 @@ func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - latestCommits(dockey: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { + latestCommits(docID: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3") { cid links { cid @@ -201,10 +201,10 @@ func TestDefaultExplainLatestCommitsDagScanQueryOpWithoutField(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { +func TestDefaultExplainLatestCommitsDagScanWithoutDocID_Failure(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) latestCommits query without DocKey.", + Description: "Explain (default) latestCommits query without docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -220,7 +220,7 @@ func TestDefaultExplainLatestCommitsDagScanWithoutDocKey_Failure(t *testing.T) { } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } @@ -247,7 +247,7 @@ func TestDefaultExplainLatestCommitsDagScanWithoutAnyArguments_Failure(t *testin } }`, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", }, }, } diff --git a/tests/integration/explain/default/delete_test.go b/tests/integration/explain/default/delete_test.go index 71f454b6e7..660785f6a4 100644 --- a/tests/integration/explain/default/delete_test.go +++ b/tests/integration/explain/default/delete_test.go @@ -41,7 +41,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { Request: `mutation @explain { delete_Author(filter: {name: {_eq: "Shahzad"}}) { - _key + _docID } }`, @@ -57,7 +57,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilter(t *testing.T) { "_eq": "Shahzad", }, }, - "ids": []string(nil), + "docIDs": []string(nil), }, }, @@ -100,7 +100,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t * Request: `mutation @explain { delete_Author(filter: {}) { - DeletedKeyByFilter: _key + DeletedKeyByFilter: _docID } }`, @@ -112,7 +112,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t 
* IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string(nil), + "docIDs": []string(nil), }, }, @@ -142,7 +142,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterToMatchEverything(t * func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) mutation request with delete using id.", + Description: "Explain (default) mutation request with delete using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -150,8 +150,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(id: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { - _key + delete_Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + _docID } }`, @@ -163,7 +163,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingId(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, }, @@ -203,11 +203,11 @@ func TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(ids: [ + delete_Author(docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ]) { - AliasKey: _key + AliasKey: _docID } }`, @@ -219,7 +219,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingIds(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, @@ -264,8 +264,8 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - delete_Author(ids: []) { - _key + delete_Author(docIDs: []) { + _docID } }`, @@ -277,7 +277,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingNoIds(t *testing.T) { IncludeChildNodes: false, ExpectedAttributes: dataMap{ "filter": nil, - "ids": []string{}, + "docIDs": []string{}, }, }, @@ -311,7 +311,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) Request: `mutation @explain { delete_Author( - ids: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test"], filter: { _and: [ {age: {_lt: 26}}, @@ -319,7 +319,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) ] } ) { - _key + _docID } }`, @@ -344,7 +344,7 @@ func TestDefaultExplainMutationRequestWithDeleteUsingFilterAndIds(t *testing.T) }, }, }, - "ids": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "test", }, diff --git a/tests/integration/explain/default/delete_with_error_test.go b/tests/integration/explain/default/delete_with_error_test.go index cbe11c2591..14ef207451 100644 --- a/tests/integration/explain/default/delete_with_error_test.go +++ b/tests/integration/explain/default/delete_with_error_test.go @@ -29,7 +29,7 @@ func TestDefaultExplainMutationRequestWithDeleteHavingNoSubSelection(t *testing. 
Request: `mutation @explain { delete_Author( - ids: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] diff --git a/tests/integration/explain/default/fixture.go b/tests/integration/explain/default/fixture.go index 306e3e133f..399a59ac20 100644 --- a/tests/integration/explain/default/fixture.go +++ b/tests/integration/explain/default/fixture.go @@ -24,7 +24,7 @@ var basicPattern = dataMap{ var emptyChildSelectsAttributeForAuthor = dataMap{ "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "filter": nil, "groupBy": nil, "limit": nil, diff --git a/tests/integration/explain/default/group_with_average_test.go b/tests/integration/explain/default/group_with_average_test.go index 32f935785d..4346ec6a9a 100644 --- a/tests/integration/explain/default/group_with_average_test.go +++ b/tests/integration/explain/default/group_with_average_test.go @@ -63,7 +63,7 @@ func TestDefaultExplainRequestWithGroupByWithAverageOnAnInnerField(t *testing.T) "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "orderBy": nil, @@ -155,7 +155,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupOnAField(t *testing. { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "orderBy": nil, "filter": nil, @@ -237,7 +237,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupOnAFieldAndNestedGro { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "orderBy": nil, "filter": nil, @@ -320,7 +320,7 @@ func TestDefaultExplainRequestWithAverageInsideTheInnerGroupAndNestedGroupByWith { "collectionName": "Author", "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "orderBy": nil, "filter": nil, diff --git a/tests/integration/explain/default/group_with_dockey_child_test.go b/tests/integration/explain/default/group_with_doc_id_child_test.go similarity index 86% rename from tests/integration/explain/default/group_with_dockey_child_test.go rename to tests/integration/explain/default/group_with_doc_id_child_test.go index 35726cda32..6ce3b8c041 100644 --- a/tests/integration/explain/default/group_with_dockey_child_test.go +++ b/tests/integration/explain/default/group_with_doc_id_child_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsOnInnerGroupSelection(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys on inner _group.", + Description: "Explain (default) request with docIDs on inner _group.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -32,7 +32,7 @@ func TestDefaultExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { groupBy: [age] ) { age - _group(dockeys: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { + _group(docIDs: ["bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"]) { name } } @@ -49,7 +49,7 @@ func TestDefaultExplainRequestWithDockeysOnInnerGroupSelection(t *testing.T) { "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": []string{"bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"}, + "docIDs": []string{"bae-6a4c5bc5-b044-5a03-a868-8260af6f2254"}, "filter": nil, "groupBy": nil, "limit": nil, diff --git 
a/tests/integration/explain/default/group_with_dockey_test.go b/tests/integration/explain/default/group_with_doc_id_test.go similarity index 89% rename from tests/integration/explain/default/group_with_dockey_test.go rename to tests/integration/explain/default/group_with_doc_id_test.go index 31555bc94a..d458ad9015 100644 --- a/tests/integration/explain/default/group_with_dockey_test.go +++ b/tests/integration/explain/default/group_with_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { +func TestDefaultExplainRequestWithDocIDOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with a dockey on parent groupBy.", + Description: "Explain (default) request with a docID on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -30,7 +30,7 @@ func TestDefaultExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { Request: `query @explain { Author( groupBy: [age], - dockey: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" + docID: "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254" ) { age _group { @@ -75,10 +75,10 @@ func TestDefaultExplainRequestWithDockeyOnParentGroupBy(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsAndFilterOnParentGroupBy(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys and filter on parent groupBy.", + Description: "Explain (default) request with docIDs and filter on parent groupBy.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -89,7 +89,7 @@ func TestDefaultExplainRequestWithDockeysAndFilterOnParentGroupBy(t *testing.T) Author( groupBy: [age], filter: {age: {_eq: 20}}, - dockeys: [ + docIDs: [ "bae-6a4c5bc5-b044-5a03-a868-8260af6f2254", "bae-4ea9d148-13f3-5a48-a0ef-9ffd344caeed" ] diff --git a/tests/integration/explain/default/group_with_filter_child_test.go b/tests/integration/explain/default/group_with_filter_child_test.go index bcb53e0e86..a8522962eb 100644 --- a/tests/integration/explain/default/group_with_filter_child_test.go +++ b/tests/integration/explain/default/group_with_filter_child_test.go @@ -47,7 +47,7 @@ func TestDefaultExplainRequestWithFilterOnInnerGroupSelection(t *testing.T) { "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "filter": dataMap{ "age": dataMap{ "_gt": int32(63), @@ -116,7 +116,7 @@ func TestDefaultExplainRequestWithFilterOnParentGroupByAndInnerGroupSelection(t "childSelects": []dataMap{ { "collectionName": "Author", - "docKeys": nil, + "docIDs": nil, "filter": dataMap{ "age": dataMap{ "_gt": int32(63), diff --git a/tests/integration/explain/default/group_with_limit_child_test.go b/tests/integration/explain/default/group_with_limit_child_test.go index 13d4730638..fb6dc83f77 100644 --- a/tests/integration/explain/default/group_with_limit_child_test.go +++ b/tests/integration/explain/default/group_with_limit_child_test.go @@ -51,7 +51,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnInnerGroupSelection(t *testing "limit": uint64(2), "offset": uint64(1), }, - "docKeys": nil, + "docIDs": nil, "filter": nil, "groupBy": nil, "orderBy": nil, @@ -104,7 +104,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnMultipleInnerGroupSelections(t "limit": uint64(1), "offset": uint64(2), }, - "docKeys": nil, + "docIDs": nil, 
"filter": nil, "groupBy": nil, "orderBy": nil, @@ -115,7 +115,7 @@ func TestDefaultExplainRequestWithLimitAndOffsetOnMultipleInnerGroupSelections(t "limit": uint64(2), "offset": uint64(0), }, - "docKeys": nil, + "docIDs": nil, "filter": nil, "groupBy": nil, "orderBy": nil, diff --git a/tests/integration/explain/default/group_with_limit_test.go b/tests/integration/explain/default/group_with_limit_test.go index 967cda469c..b88496c7dd 100644 --- a/tests/integration/explain/default/group_with_limit_test.go +++ b/tests/integration/explain/default/group_with_limit_test.go @@ -121,7 +121,7 @@ func TestDefaultExplainRequestWithLimitOnParentGroupByAndInnerGroupSelection(t * "offset": uint64(0), }, "orderBy": nil, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "filter": nil, }, diff --git a/tests/integration/explain/default/group_with_order_child_test.go b/tests/integration/explain/default/group_with_order_child_test.go index 55d14ef469..e8ba14d697 100644 --- a/tests/integration/explain/default/group_with_order_child_test.go +++ b/tests/integration/explain/default/group_with_order_child_test.go @@ -53,7 +53,7 @@ func TestDefaultExplainRequestWithDescendingOrderOnInnerGroupSelection(t *testin "fields": []string{"age"}, }, }, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "filter": nil, @@ -105,7 +105,7 @@ func TestDefaultExplainRequestWithAscendingOrderOnInnerGroupSelection(t *testing "fields": []string{"age"}, }, }, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "filter": nil, @@ -164,7 +164,7 @@ func TestDefaultExplainRequestWithOrderOnNestedParentGroupByAndOnNestedParentsIn }, }, "groupBy": []string{"verified", "name"}, - "docKeys": nil, + "docIDs": nil, "limit": nil, "filter": nil, }, diff --git a/tests/integration/explain/default/group_with_order_test.go b/tests/integration/explain/default/group_with_order_test.go index 7de88087df..43e6b7ba05 100644 --- a/tests/integration/explain/default/group_with_order_test.go +++ b/tests/integration/explain/default/group_with_order_test.go @@ -180,7 +180,7 @@ func TestDefaultExplainRequestWithOrderOnParentGroupByAndOnInnerGroupSelection(t "fields": []string{"age"}, }, }, - "docKeys": nil, + "docIDs": nil, "groupBy": nil, "limit": nil, "filter": nil, diff --git a/tests/integration/explain/default/invalid_type_arg_test.go b/tests/integration/explain/default/invalid_type_arg_test.go index 391d56492a..2759eb8bd0 100644 --- a/tests/integration/explain/default/invalid_type_arg_test.go +++ b/tests/integration/explain/default/invalid_type_arg_test.go @@ -28,7 +28,7 @@ func TestInvalidExplainRequestTypeReturnsError(t *testing.T) { Request: `query @explain(type: invalid) { Author { - _key + _docID name age } diff --git a/tests/integration/explain/default/type_join_many_test.go b/tests/integration/explain/default/type_join_many_test.go index 9fa66bf76a..3b700b132b 100644 --- a/tests/integration/explain/default/type_join_many_test.go +++ b/tests/integration/explain/default/type_join_many_test.go @@ -84,7 +84,7 @@ func TestDefaultExplainRequestWithAOneToManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_one_test.go b/tests/integration/explain/default/type_join_one_test.go index 472a6f2164..8a7fac0925 100644 --- a/tests/integration/explain/default/type_join_one_test.go +++ b/tests/integration/explain/default/type_join_one_test.go @@ -85,7 
+85,7 @@ func TestDefaultExplainRequestWithAOneToOneJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, @@ -225,7 +225,7 @@ func TestDefaultExplainRequestWithTwoLevelDeepNestedJoins(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_test.go b/tests/integration/explain/default/type_join_test.go index c3ca250565..fd1676aed9 100644 --- a/tests/integration/explain/default/type_join_test.go +++ b/tests/integration/explain/default/type_join_test.go @@ -119,7 +119,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, @@ -177,7 +177,7 @@ func TestDefaultExplainRequestWith2SingleJoinsAnd1ManyJoin(t *testing.T) { ExpectedAttributes: dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/explain/default/type_join_with_filter_and_key_test.go b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go similarity index 92% rename from tests/integration/explain/default/type_join_with_filter_and_key_test.go rename to tests/integration/explain/default/type_join_with_filter_doc_id_test.go index 2290de03e6..7b320b01b7 100644 --- a/tests/integration/explain/default/type_join_with_filter_and_key_test.go +++ b/tests/integration/explain/default/type_join_with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { +func TestDefaultExplainRequestWithRelatedAndRegularFilterAndDocIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with related and regular filter + keys.", + Description: "Explain (default) request with related and regular filter + docIDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -33,7 +33,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { name: {_eq: "John Grisham"}, books: {name: {_eq: "Painted House"}} }, - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e" ] @@ -59,7 +59,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f8e", }, @@ -103,10 +103,10 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilterAndKeys(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { +func TestDefaultExplainRequestWithManyRelatedFiltersAndDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with many related filters + key.", + Description: "Explain (default) request with many related filters + docID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -120,7 +120,7 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { articles: {name: 
{_eq: "To my dear readers"}}, books: {name: {_eq: "Theif Lord"}} }, - dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] + docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"] ) { name age @@ -150,7 +150,7 @@ func TestDefaultExplainRequestWithManyRelatedFiltersAndKey(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": dataMap{ diff --git a/tests/integration/explain/default/type_join_with_filter_test.go b/tests/integration/explain/default/type_join_with_filter_test.go index 799ad2677d..78ed484b0c 100644 --- a/tests/integration/explain/default/type_join_with_filter_test.go +++ b/tests/integration/explain/default/type_join_with_filter_test.go @@ -55,7 +55,7 @@ func TestDefaultExplainRequestWithRelatedAndRegularFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": nil, + "docIDs": nil, "filter": dataMap{ "books": dataMap{ "name": dataMap{ @@ -138,7 +138,7 @@ func TestDefaultExplainRequestWithManyRelatedFilters(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": nil, + "docIDs": nil, "filter": dataMap{ "articles": dataMap{ "name": dataMap{ diff --git a/tests/integration/explain/default/update_test.go b/tests/integration/explain/default/update_test.go index 0b5ee28920..cd2af141c3 100644 --- a/tests/integration/explain/default/update_test.go +++ b/tests/integration/explain/default/update_test.go @@ -48,7 +48,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) }, data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -69,7 +69,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) "_eq": true, }, }, - "ids": []string(nil), + "docIDs": []string(nil), }, }, { @@ -111,13 +111,13 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { Request: `mutation @explain { update_Author( - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -134,7 +134,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { "age": float64(59), }, "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, @@ -170,7 +170,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) mutation request with update using id.", + Description: "Explain (default) mutation request with update using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -179,10 +179,10 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { Request: `mutation @explain { update_Author( - id: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", + docID: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -199,7 +199,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { "age": float64(59), }, "filter": nil, - "ids": []string{ + "docIDs": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, }, @@ -244,13 +244,13 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) _eq: true } }, - ids: [ + docIDs: [ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", 
"bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], data: "{\"age\": 59}" ) { - _key + _docID name age } @@ -271,7 +271,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) "_eq": true, }, }, - "ids": []string{ + "docIDs": []string{ "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, diff --git a/tests/integration/explain/default/with_filter_key_test.go b/tests/integration/explain/default/with_filter_doc_id_test.go similarity index 85% rename from tests/integration/explain/default/with_filter_key_test.go rename to tests/integration/explain/default/with_filter_doc_id_test.go index 7f181a07f5..a5807d1da7 100644 --- a/tests/integration/explain/default/with_filter_key_test.go +++ b/tests/integration/explain/default/with_filter_doc_id_test.go @@ -17,10 +17,10 @@ import ( explainUtils "github.com/sourcenetwork/defradb/tests/integration/explain" ) -func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { +func TestDefaultExplainRequestWithDocIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockey filter.", + Description: "Explain (default) request with docID filter.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -28,7 +28,7 @@ func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - Author(dockey: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { + Author(docID: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d") { name age } @@ -40,7 +40,7 @@ func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": nil, @@ -69,10 +69,10 @@ func TestDefaultExplainRequestWithDocKeyFilter(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsFilterUsingOneID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys filter using one key.", + Description: "Explain (default) request with docIDs filter using one ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -80,7 +80,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain { - Author(dockeys: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { + Author(docIDs: ["bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d"]) { name age } @@ -92,7 +92,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, "filter": nil, @@ -121,10 +121,10 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingOneKey(t *testing.T) { explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleButDuplicateIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys filter using multiple but duplicate keys.", + Description: "Explain (default) request with docIDs filter using multiple but duplicate IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -133,7 +133,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t * Request: `query @explain { 
Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ] @@ -149,7 +149,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t * { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, @@ -183,10 +183,10 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleButDuplicateKeys(t * explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testing.T) { +func TestDefaultExplainRequestWithDocIDsFilterUsingMultipleUniqueIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with dockeys filter using multiple unique keys.", + Description: "Explain (default) request with docIDs filter using multiple unique IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -195,7 +195,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testin Request: `query @explain { Author( - dockeys: [ + docIDs: [ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f" ] @@ -211,7 +211,7 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testin { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": []string{ + "docIDs": []string{ "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", }, @@ -245,10 +245,10 @@ func TestDefaultExplainRequestWithDocKeysFilterUsingMultipleUniqueKeys(t *testin explainUtils.ExecuteTestCase(t, test) } -func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { +func TestDefaultExplainRequestWithMatchingIDFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (default) request with a filter to match key.", + Description: "Explain (default) request with a filter to match ID.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -258,7 +258,7 @@ func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { Request: `query @explain { Author( filter: { - _key: { + _docID: { _eq: "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" } } @@ -274,7 +274,7 @@ func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { { TargetNodeName: "selectNode", ExpectedAttributes: dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, }, }, @@ -285,7 +285,7 @@ func TestDefaultExplainRequestWithMatchingKeyFilter(t *testing.T) { "collectionID": "3", "collectionName": "Author", "filter": dataMap{ - "_key": dataMap{ + "_docID": dataMap{ "_eq": "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d", }, }, diff --git a/tests/integration/explain/default/with_sum_join_test.go b/tests/integration/explain/default/with_sum_join_test.go index 74d330fefd..5117031959 100644 --- a/tests/integration/explain/default/with_sum_join_test.go +++ b/tests/integration/explain/default/with_sum_join_test.go @@ -42,7 +42,7 @@ func TestDefaultExplainRequestWithSumOnOneToManyJoinedField(t *testing.T) { Request: `query @explain { Author { name - _key + _docID TotalPages: _sum( books: {field: pages} ) diff --git a/tests/integration/explain/execute/dagscan_test.go b/tests/integration/explain/execute/dagscan_test.go index 9b91ff5003..3edc6e71f0 100644 --- a/tests/integration/explain/execute/dagscan_test.go +++ b/tests/integration/explain/execute/dagscan_test.go @@ -30,7 +30,7 @@ func TestExecuteExplainCommitsDagScan(t *testing.T) { testUtils.ExplainRequest{ 
Request: `query @explain(type: execute) { - commits (dockey: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { + commits (docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { links { cid } @@ -75,7 +75,7 @@ func TestExecuteExplainLatestCommitsDagScan(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: execute) { - latestCommits(dockey: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { + latestCommits(docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138") { cid links { cid diff --git a/tests/integration/explain/execute/delete_test.go b/tests/integration/explain/execute/delete_test.go index e924ce334c..75aa515c1a 100644 --- a/tests/integration/explain/execute/delete_test.go +++ b/tests/integration/explain/execute/delete_test.go @@ -20,7 +20,7 @@ import ( func TestExecuteExplainMutationRequestWithDeleteUsingID(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (execute) mutation request with deletion using id.", + Description: "Explain (execute) mutation request with deletion using document id.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -30,7 +30,7 @@ func TestExecuteExplainMutationRequestWithDeleteUsingID(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { - delete_ContactAddress(ids: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { + delete_ContactAddress(docIDs: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { city } }`, diff --git a/tests/integration/explain/execute/fixture.go b/tests/integration/explain/execute/fixture.go index ec83514778..7de5e6a959 100644 --- a/tests/integration/explain/execute/fixture.go +++ b/tests/integration/explain/execute/fixture.go @@ -81,7 +81,7 @@ func create2AuthorDocuments() []testUtils.CreateDoc { return []testUtils.CreateDoc{ { CollectionID: 2, - // _key: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" + // _docID: "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" Doc: `{ "name": "John Grisham", "age": 65, @@ -91,7 +91,7 @@ func create2AuthorDocuments() []testUtils.CreateDoc { }, { CollectionID: 2, - // _key: "bae-68cb395d-df73-5bcb-b623-615a140dee12" + // _docID: "bae-68cb395d-df73-5bcb-b623-615a140dee12" Doc: `{ "name": "Cornelia Funke", "age": 62, @@ -107,7 +107,7 @@ func create2AuthorContactDocuments() []testUtils.CreateDoc { { CollectionID: 3, // "author_id": "bae-7f54d9e0-cbde-5320-aa6c-5c8895a89138" - // _key: "bae-4db5359b-7dbe-5778-b96f-d71d1e6d0871" + // _docID: "bae-4db5359b-7dbe-5778-b96f-d71d1e6d0871" Doc: `{ "cell": "5197212301", "email": "john_grisham@example.com", @@ -117,7 +117,7 @@ func create2AuthorContactDocuments() []testUtils.CreateDoc { { CollectionID: 3, // "author_id": "bae-68cb395d-df73-5bcb-b623-615a140dee12", - // _key: "bae-1f19fc5d-de4d-59a5-bbde-492be1757d65" + // _docID: "bae-1f19fc5d-de4d-59a5-bbde-492be1757d65" Doc: `{ "cell": "5197212302", "email": "cornelia_funke@example.com", @@ -132,7 +132,7 @@ func create2AddressDocuments() []testUtils.CreateDoc { { CollectionID: 4, // "contact_id": "bae-4db5359b-7dbe-5778-b96f-d71d1e6d0871" - // _key: bae-c8448e47-6cd1-571f-90bd-364acb80da7b + // _docID: bae-c8448e47-6cd1-571f-90bd-364acb80da7b Doc: `{ "city": "Waterloo", "country": "Canada" @@ -141,7 +141,7 @@ func create2AddressDocuments() []testUtils.CreateDoc { { CollectionID: 4, // "contact_id": ""bae-1f19fc5d-de4d-59a5-bbde-492be1757d65"" - // _key: bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692 + // _docID: bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692 Doc: `{ "city": "Brampton", "country": "Canada" diff --git a/tests/integration/explain/execute/query_deleted_docs_test.go 
b/tests/integration/explain/execute/query_deleted_docs_test.go index cb1ebbcaa7..7872eb4847 100644 --- a/tests/integration/explain/execute/query_deleted_docs_test.go +++ b/tests/integration/explain/execute/query_deleted_docs_test.go @@ -26,12 +26,12 @@ func TestExecuteExplainQueryDeletedDocs(t *testing.T) { create2AddressDocuments(), testUtils.Request{ Request: `mutation { - delete_ContactAddress(ids: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { - _key + delete_ContactAddress(docIDs: ["bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"]) { + _docID } }`, Results: []map[string]any{ - {"_key": "bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"}, + {"_docID": "bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692"}, }, }, testUtils.ExplainRequest{ diff --git a/tests/integration/explain/execute/update_test.go b/tests/integration/explain/execute/update_test.go index a1fa92b091..fa54f7f331 100644 --- a/tests/integration/explain/execute/update_test.go +++ b/tests/integration/explain/execute/update_test.go @@ -20,7 +20,7 @@ import ( func TestExecuteExplainMutationRequestWithUpdateUsingIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Explain (execute) mutation request with update using ids.", + Description: "Explain (execute) mutation request with update using document IDs.", Actions: []any{ explainUtils.SchemaForExplainTests, @@ -31,7 +31,7 @@ func TestExecuteExplainMutationRequestWithUpdateUsingIDs(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { update_ContactAddress( - ids: [ + docIDs: [ "bae-c8448e47-6cd1-571f-90bd-364acb80da7b", "bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692" ], diff --git a/tests/integration/explain/simple/basic_test.go b/tests/integration/explain/simple/basic_test.go index 9920458952..d94deb01a9 100644 --- a/tests/integration/explain/simple/basic_test.go +++ b/tests/integration/explain/simple/basic_test.go @@ -29,7 +29,7 @@ func TestSimpleExplainRequest(t *testing.T) { testUtils.ExplainRequest{ Request: `query @explain(type: simple) { Author { - _key + _docID name age } @@ -40,7 +40,7 @@ func TestSimpleExplainRequest(t *testing.T) { "explain": dataMap{ "selectTopNode": dataMap{ "selectNode": dataMap{ - "_keys": nil, + "docIDs": nil, "filter": nil, "scanNode": dataMap{ "filter": nil, diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index 0cea5023e6..69731e7b46 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -18,7 +18,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -const johnDockey = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" +const johnDocID = "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7" func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { test := testUtils.TestCase{ @@ -60,7 +60,7 @@ func TestCreateUniqueIndex_IfFieldValuesAreNotUnique_ReturnError(t *testing.T) { CollectionID: 0, FieldName: "age", Unique: true, - ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDocID, "age", 21).Error(), }, testUtils.GetIndexes{ CollectionID: 0, @@ -99,7 +99,7 @@ func TestUniqueIndexCreate_UponAddingDocWithExistingFieldValue_ReturnError(t *te "name": "John", "age": 21 }`, - ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDockey, "age", 21).Error(), + ExpectedError: db.NewErrCanNotIndexNonUniqueField(johnDocID, "age", 21).Error(), }, testUtils.Request{ Request: `query { diff --git 
a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go index 6fadbc5d85..3b37756b6c 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go @@ -108,7 +108,7 @@ func TestMutationCreateOneToMany_AliasedRelationNameInvalidIDManySide_CreatedDoc } func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to many create mutation using relation id from many side, with alias.", @@ -126,7 +126,7 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin "name": "Painted House", "author": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ @@ -174,9 +174,9 @@ func TestMutationCreateOneToMany_AliasedRelationNameToLinkFromManySide(t *testin } func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDocID(t *testing.T) { - // These keys MUST be shared by both tests below. - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + // These IDs MUST be shared by both tests below. + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" nonAliasedTest := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", @@ -194,18 +194,18 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo "name": "Painted House", "author_id": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ Request: `query { Book { - _key + _docID } }`, Results: []map[string]any{ { - "_key": bookKey, // Must be same as below. + "_docID": bookID, // Must be same as below. }, }, }, @@ -213,7 +213,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo } executeTestCase(t, nonAliasedTest) - // Check that `bookKey` is same in both above and the alised version below. + // Check that `bookID` is same in both above and the aliased version below. // Note: Everything should be same, only diff should be the use of alias. aliasedTest := testUtils.TestCase{ @@ -232,18 +232,18 @@ func TestMutationUpdateOneToMany_AliasRelationNameAndInternalIDBothProduceSameDo "name": "Painted House", "author": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ Request: `query { Book { - _key + _docID } }`, Results: []map[string]any{ { - "_key": bookKey, // Must be same as above. + "_docID": bookID, // Must be same as above.
}, }, }, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index 1e491e3626..da8bd1b7b0 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -74,7 +74,7 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Err "name": "Painted House", "author": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_UseAliasWithNonExistingRelationSecondarySide_Err } func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySide(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one create mutation with an alias relation.", @@ -100,7 +100,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid "name": "John Grisham", "published": "%s" }`, - bookKey, + bookID, ), }, testUtils.Request{ @@ -146,7 +146,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromPrimarySid } func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondarySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation from secondary side with alias relation.", @@ -164,7 +164,7 @@ func TestMutationCreateOneToOne_UseAliasedRelationNameToLink_QueryFromSecondaryS "name": "Painted House", "author": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index d22a77de6c..cf985bfa18 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -74,7 +74,7 @@ func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testin "name": "Painted House", "author_id": "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" }`, - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -82,7 +82,7 @@ func TestMutationCreateOneToOne_NonExistingRelationSecondarySide_Error(t *testin } func TestMutationCreateOneToOne(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one create mutation", @@ -100,7 +100,7 @@ func TestMutationCreateOneToOne(t *testing.T) { "name": "John Grisham", "published_id": "%s" }`, - bookKey, + bookID, ), }, testUtils.Request{ @@ -148,7 +148,7 @@ func TestMutationCreateOneToOne(t *testing.T) { } func TestMutationCreateOneToOneSecondarySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation from secondary side", @@ -166,7 +166,7 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { "name": "Painted House", "author_id": "%s" }`, - authorKey, + 
authorID, ), }, testUtils.Request{ @@ -214,7 +214,7 @@ func TestMutationCreateOneToOneSecondarySide(t *testing.T) { } func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one create mutation, errors due to link already existing, primary side", @@ -231,7 +231,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( "name": "John Grisham", "published_id": "%s" }`, - bookKey, + bookID, ), }, testUtils.CreateDoc{ @@ -240,7 +240,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( "name": "Saadi Shirazi", "published_id": "%s" }`, - bookKey, + bookID, ), ExpectedError: "target document is already linked to another document.", }, @@ -251,7 +251,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaPrimary( } func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondary(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation, errors due to link already existing, secondary side", @@ -268,7 +268,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondar "name": "Painted House", "author_id": "%s" }`, - authorKey, + authorID, ), }, testUtils.CreateDoc{ @@ -277,7 +277,7 @@ func TestMutationCreateOneToOne_ErrorsGivenRelationAlreadyEstablishedViaSecondar "name": "Golestan", "author_id": "%s" }`, - authorKey, + authorID, ), ExpectedError: "target document is already linked to another document.", }, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go index 0cc3807ddc..946c081929 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go @@ -24,7 +24,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. Actions: []any{ testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -32,7 +32,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. }, testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -43,12 +43,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. TransactionID: immutable.Some(0), Request: `mutation { create_Book(data: "{\"name\": \"Book By Website\",\"rating\": 4.0, \"publisher_id\": \"bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -56,12 +56,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. 
TransactionID: immutable.Some(1), Request: `mutation { create_Book(data: "{\"name\": \"Book By Online\",\"rating\": 4.0, \"publisher_id\": \"bae-8a381044-9206-51e7-8bc8-dc683d5f2523\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", }, }, }, @@ -70,26 +70,26 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. TransactionID: immutable.Some(0), Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", "published": map[string]any{ - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", }, }, { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", "name": "Online", "published": nil, }, @@ -100,27 +100,27 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. TransactionID: immutable.Some(1), Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": nil, }, { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", "published": map[string]any{ - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", }, }, }, @@ -136,30 +136,30 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. // Assert books -> publisher direction outside the transactions. 
Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", "publisher": map[string]any{ - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", }, }, { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", "publisher": map[string]any{ - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", }, }, }, @@ -176,7 +176,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing Actions: []any{ testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -184,7 +184,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }, testUtils.CreateDoc{ CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -195,12 +195,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(0), Request: `mutation { create_Book(data: "{\"name\": \"Book By Website\",\"rating\": 4.0, \"publisher_id\": \"bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -208,12 +208,12 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(1), Request: `mutation { create_Book(data: "{\"name\": \"Book By Online\",\"rating\": 4.0, \"publisher_id\": \"bae-8a381044-9206-51e7-8bc8-dc683d5f2523\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", }, }, }, @@ -222,21 +222,21 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(0), Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", "publisher": map[string]any{ - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", }, }, }, @@ -246,21 +246,21 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing TransactionID: immutable.Some(1), Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", "publisher": map[string]any{ - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + 
"_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", }, }, }, @@ -276,30 +276,30 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing // Assert publishers -> books direction outside the transactions. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", "published": map[string]any{ - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", }, }, { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", - "name": "Online", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "name": "Online", "published": map[string]any{ - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", - "name": "Book By Online", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "name": "Book By Online", }, }, }, diff --git a/tests/integration/mutation/create/simple_test.go b/tests/integration/mutation/create/simple_test.go index 54f3de9536..cedac8c58e 100644 --- a/tests/integration/mutation/create/simple_test.go +++ b/tests/integration/mutation/create/simple_test.go @@ -75,7 +75,7 @@ func TestMutationCreate(t *testing.T) { Request: ` query { Users { - _key + _docID name age } @@ -83,9 +83,9 @@ func TestMutationCreate(t *testing.T) { `, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(27), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(27), }, }, }, @@ -124,7 +124,7 @@ func TestMutationCreate_GivenDuplicate_Errors(t *testing.T) { "name": "John", "age": 27 }`, - ExpectedError: "a document with the given dockey already exists.", + ExpectedError: "a document with the given ID already exists", }, }, } @@ -146,7 +146,7 @@ func TestMutationCreate_GivenEmptyData_Errors(t *testing.T) { testUtils.Request{ Request: `mutation { create_Users(data: "") { - _key + _docID } }`, ExpectedError: "given data payload is empty", diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index 7cf879737e..1d98ead005 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -39,7 +39,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafybeifwfw3g4q6tagffdwq4orrouoosdlsc5rb67q2uj7oplkq7ax5ysm", + "cid": "bafybeicbv34oa4hfcnqbka3jqnby4g75ttlj4wfvc7zhvat5xca45ggq2u", }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index a30cf60050..4d75d3b916 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -20,7 +20,7 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testing.T) { +func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *testing.T) { jsonString1 := `{ "name": "John", "age": 30 @@ -32,7 +32,7 @@ func 
TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin "name": "John and the philosopher are stoned", "rating": 9.9, "author_id": "%s" - }`, doc1.Key()) + }`, doc1.ID()) doc2, err := client.NewDocFromJSON([]byte(jsonString2)) require.NoError(t, err) @@ -40,12 +40,12 @@ func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin "name": "John has a chamber of secrets", "rating": 9.9, "author_id": "%s" - }`, doc1.Key()) + }`, doc1.ID()) // doc3, err := client.NewDocFromJSON([]byte(jsonString1)) // require.NoError(t, err) test := testUtils.TestCase{ - Description: "One to many delete document using single key show deleted.", + Description: "One to many delete document using single document id, show deleted.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -75,13 +75,13 @@ func TestDeletionOfADocumentUsingSingleKeyWithShowDeletedDocumentQuery(t *testin }, testUtils.Request{ Request: fmt.Sprintf(`mutation { - delete_Book(id: "%s") { - _key + delete_Book(docID: "%s") { + _docID } - }`, doc2.Key()), + }`, doc2.ID()), Results: []map[string]any{ { - "_key": doc2.Key().String(), + "_docID": doc2.ID().String(), }, }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go index 851ec73da0..2efa3ef960 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_id_test.go @@ -53,13 +53,13 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(id: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { - _key + delete_Author(docID: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-2f80f359-535d-508e-ba58-088a309ce3c3", + "_docID": "bae-2f80f359-535d-508e-ba58-088a309ce3c3", }, }, }, @@ -67,7 +67,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, { - Description: "Relational delete mutation with an aliased _key name.", + Description: "Relational delete mutation with an aliased _docID name.", Actions: []any{ testUtils.CreateDoc{ // Books @@ -101,8 +101,8 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(id: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { - AliasOfKey: _key + delete_Author(docID: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { + AliasOfKey: _docID } }`, Results: []map[string]any{ @@ -115,7 +115,7 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, { - Description: "Relational Delete of an updated document and an aliased _key name.", + Description: "Relational Delete of an updated document and an aliased _docID name.", Actions: []any{ testUtils.CreateDoc{ // Books @@ -166,8 +166,8 @@ func TestRelationalDeletionOfADocumentUsingSingleKey_Success(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_Author(id: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { - Key: _key + delete_Author(docID: "bae-2f80f359-535d-508e-ba58-088a309ce3c3") { + Key: _docID } }`, Results: []map[string]any{ diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go index 166642ae13..6447551393 100644 --- 
a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go @@ -25,7 +25,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -35,7 +35,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -45,13 +45,13 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -62,17 +62,17 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { // Assert after transaction(s) have been commited, to ensure the book was deleted. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": nil, }, @@ -91,7 +91,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -101,7 +101,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -111,13 +111,13 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -128,10 +128,10 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { // Assert after transaction(s) have been commited, to ensure the book was deleted. 
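These cases pin each request to an explicit transaction via `TransactionID: immutable.Some(n)`: a delete made in transaction 0 must stay invisible to transaction 1 until commit, which the paired queries assert. A condensed sketch of that pattern, reusing the `User` fixture and document ID that appear later in this series (schema reconstructed from the fixtures, harness entry point assumed):

```go
package example

import (
	"testing"

	"github.com/sourcenetwork/immutable"

	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

func TestDeleteIsInvisibleToOtherTransaction(t *testing.T) {
	test := testUtils.TestCase{
		Actions: []any{
			testUtils.SchemaUpdate{
				Schema: `
					type User {
						name: String
						age: Int
					}
				`,
			},
			testUtils.CreateDoc{
				Doc: `{"name": "John","age": 27}`,
			},
			testUtils.Request{
				// Delete inside transaction 0 only.
				TransactionID: immutable.Some(0),
				Request: `mutation {
					delete_User(docID: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") {
						_docID
					}
				}`,
				Results: []map[string]any{
					{"_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1"},
				},
			},
			testUtils.Request{
				// Transaction 1 has its own view: the uncommitted delete
				// from transaction 0 is not visible here.
				TransactionID: immutable.Some(1),
				Request:       `query { User { name } }`,
				Results: []map[string]any{
					{"name": "John"},
				},
			},
		},
	}
	testUtils.ExecuteTestCase(t, test)
}
```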
Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } @@ -151,7 +151,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -161,7 +161,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -171,13 +171,13 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -186,21 +186,21 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes TransactionID: immutable.Some(1), Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", "published": map[string]any{ - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", }, }, }, @@ -212,17 +212,17 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes // Assert after transaction(s) have been commited, to ensure the book was deleted. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": nil, }, @@ -241,7 +241,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -251,7 +251,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + // "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", Doc: `{ "name": "Website", "address": "Manning Publications" @@ -261,13 +261,13 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te // Delete a linked book that exists in transaction 0. 
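Alongside the field rename, the request arguments change spelling too: `id` becomes `docID`, and `ids`/`dockeys` become `docIDs`. Side by side, using a document ID taken from the fixtures above (the old form is shown only for contrast):

```go
package example

// Old spelling, removed by this series:
const deleteByIDOld = `mutation {
	delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { _key }
}`

// New spelling:
const deleteByIDNew = `mutation {
	delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { _docID }
}`
```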
TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(id: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { - _key + delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", }, }, }, @@ -276,21 +276,21 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te TransactionID: immutable.Some(1), Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", - "name": "Book By Website", + "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "name": "Book By Website", "publisher": map[string]any{ - "_key": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", - "name": "Website", + "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", + "name": "Website", }, }, }, @@ -302,10 +302,10 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te // Assert after transaction(s) have been commited, to ensure the book was deleted. Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } @@ -325,7 +325,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + // "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -335,7 +335,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -346,13 +346,13 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) // book gets correctly unlinked too. TransactionID: immutable.Some(0), Request: `mutation { - delete_Publisher(id: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { - _key + delete_Publisher(docID: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", }, }, }, @@ -363,10 +363,10 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) // Assert after transaction(s) have been commited. Request: `query { Publisher { - _key + _docID name published { - _key + _docID name } } @@ -386,7 +386,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T testUtils.CreateDoc{ // books CollectionID: 0, - // "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + // "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -396,7 +396,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T testUtils.CreateDoc{ // publishers CollectionID: 2, - // "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + // "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", Doc: `{ "name": "Online", "address": "Manning Early Access Program (MEAP)" @@ -407,13 +407,13 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T // book gets correctly unlinked too. 
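As the comment above notes, deleting a publisher from the non-primary side of the relation must also clear the link held by the surviving book, so the post-commit query expects a nil `publisher`. The result shape being asserted, with values taken from these fixtures:

```go
package example

// Expected result of the post-commit Book query once the publisher is
// deleted: the book survives, and its relation resolves to nil.
var expectedAfterPublisherDelete = []map[string]any{
	{
		"_docID":    "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251",
		"name":      "Book By Online",
		"publisher": nil,
	},
}
```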
TransactionID: immutable.Some(0), Request: `mutation { - delete_Publisher(id: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { - _key + delete_Publisher(docID: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", + "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", }, }, }, @@ -424,17 +424,17 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T // Assert after transaction(s) have been commited. Request: `query { Book { - _key + _docID name publisher { - _key + _docID name } } }`, Results: []map[string]any{ { - "_key": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", "name": "Book By Online", "publisher": nil, }, diff --git a/tests/integration/mutation/delete/simple_test.go b/tests/integration/mutation/delete/simple_test.go index 5b28d100d3..63c5744c1b 100644 --- a/tests/integration/mutation/delete/simple_test.go +++ b/tests/integration/mutation/delete/simple_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithoutSubSelection(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist without sub selection, should give error.", + Description: "Delete without sub-selection, should give error.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -41,7 +41,7 @@ func TestMutationDeletion_WithoutSubSelection(t *testing.T) { func TestMutationDeletion_WithoutSubSelectionFields(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist without _key sub-selection.", + Description: "Delete without sub-selection fields, should give error.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/mutation/delete/with_deleted_field_test.go b/tests/integration/mutation/delete/with_deleted_field_test.go index 25784b52d2..55e1a9f2dd 100644 --- a/tests/integration/mutation/delete/with_deleted_field_test.go +++ b/tests/integration/mutation/delete/with_deleted_field_test.go @@ -18,7 +18,7 @@ import ( // This test documents a bug, see: // https://github.com/sourcenetwork/defradb/issues/1846 -func TestMutationDeletion_WithoDeletedField(t *testing.T) { +func TestMutationDeletion_WithDeletedField(t *testing.T) { test := testUtils.TestCase{ Actions: []any{ testUtils.SchemaUpdate{ @@ -36,16 +36,16 @@ func TestMutationDeletion_WithoDeletedField(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(id: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { + delete_User(docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { _deleted - _key + _docID } }`, Results: []map[string]any{ { // This should be true, as it has been deleted. 
"_deleted": false, - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", }, }, }, diff --git a/tests/integration/mutation/delete/with_id_alias_test.go b/tests/integration/mutation/delete/with_id_alias_test.go index 1890092b00..5709b7cadc 100644 --- a/tests/integration/mutation/delete/with_id_alias_test.go +++ b/tests/integration/mutation/delete/with_id_alias_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDAndAlias(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple delete mutation with an aliased _key name.", + Description: "Simple delete mutation with an alias field name.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,8 +34,8 @@ func TestMutationDeletion_WithIDAndAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - fancyKey: _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + fancyKey: _docID } }`, Results: []map[string]any{ diff --git a/tests/integration/mutation/delete/with_id_test.go b/tests/integration/mutation/delete/with_id_test.go index 68adcc7e6f..78c923693e 100644 --- a/tests/integration/mutation/delete/with_id_test.go +++ b/tests/integration/mutation/delete/with_id_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { test := testUtils.TestCase{ - Description: "Deletion using id that doesn't exist, where the collection is empty.", + Description: "Deletion using document id that doesn't exist, where the collection is empty.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -29,8 +29,8 @@ func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _docID } }`, Results: []map[string]any{}, @@ -43,7 +43,7 @@ func TestMutationDeletion_WithIDUnknownValue(t *testing.T) { func TestMutationDeletion_WithIDUnknownValueAndUnrelatedRecordInCollection(t *testing.T) { test := testUtils.TestCase{ - Description: "Deletion using id that doesn't exist, where the collection is non-empty.", + Description: "Deletion using document id that doesn't exist, where the collection is non-empty.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -59,8 +59,8 @@ func TestMutationDeletion_WithIDUnknownValueAndUnrelatedRecordInCollection(t *te }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _docID } }`, Results: []map[string]any{}, diff --git a/tests/integration/mutation/delete/with_id_txn_test.go b/tests/integration/mutation/delete/with_id_txn_test.go index c4f2ad6bdc..aeb6a4772b 100644 --- a/tests/integration/mutation/delete/with_id_txn_test.go +++ b/tests/integration/mutation/delete/with_id_txn_test.go @@ -37,13 +37,13 @@ func TestMutationDeletion_WithIDAndTxn(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, }, }, @@ -51,7 +51,7 @@ func TestMutationDeletion_WithIDAndTxn(t *testing.T) { 
TransactionID: immutable.Some(0), Request: `query { User { - _key + _docID } }`, Results: []map[string]any{}, diff --git a/tests/integration/mutation/delete/with_ids_alias_test.go b/tests/integration/mutation/delete/with_ids_alias_test.go index 1c6be23278..e91432e787 100644 --- a/tests/integration/mutation/delete/with_ids_alias_test.go +++ b/tests/integration/mutation/delete/with_ids_alias_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDsAndSelectAlias(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist, when given multiple keys with alias.", + Description: "Delete multiple documents that exist, when given multiple IDs with alias.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -48,16 +48,16 @@ func TestMutationDeletion_WithIDsAndSelectAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - AliasKey: _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { + AliasID: _docID } }`, Results: []map[string]any{ { - "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + "AliasID": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", }, { - "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "AliasID": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", }, }, }, diff --git a/tests/integration/mutation/delete/with_ids_filter_test.go b/tests/integration/mutation/delete/with_ids_filter_test.go index 8d93bdf9cf..02ddb656f7 100644 --- a/tests/integration/mutation/delete/with_ids_filter_test.go +++ b/tests/integration/mutation/delete/with_ids_filter_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDsAndEmptyFilter(t *testing.T) { test := testUtils.TestCase{ - Description: "Deletion of using ids and filter, known id and empty filter.", + Description: "Deletion of using document ids and filter, known id and empty filter.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -34,13 +34,13 @@ func TestMutationDeletion_WithIDsAndEmptyFilter(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"], filter: {}) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5"], filter: {}) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, }, }, diff --git a/tests/integration/mutation/delete/with_ids_test.go b/tests/integration/mutation/delete/with_ids_test.go index 48adcb2e48..18371c2d70 100644 --- a/tests/integration/mutation/delete/with_ids_test.go +++ b/tests/integration/mutation/delete/with_ids_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithIDs(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist, when given multiple keys.", + Description: "Delete multiple documents that exist, when given multiple IDs.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -39,16 +39,16 @@ func TestMutationDeletion_WithIDs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": 
"bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", }, }, }, @@ -81,8 +81,8 @@ func TestMutationDeletion_WithEmptyIDs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: []) { - _key + delete_User(docIDs: []) { + _docID } }`, Results: []map[string]any{}, @@ -122,8 +122,8 @@ func TestMutationDeletion_WithIDsSingleUnknownID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { - _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507e"]) { + _docID } }`, Results: []map[string]any{}, @@ -147,8 +147,8 @@ func TestMutationDeletion_WithIDsMultipleUnknownID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { - _key + delete_User(docIDs: ["bae-028383cc-d6ba-5df7-959f-2bdce3536a05", "bae-028383cc-d6ba-5df7-959f-2bdce3536a03"]) { + _docID } }`, Results: []map[string]any{}, @@ -177,13 +177,13 @@ func TestMutationDeletion_WithIDsKnownAndUnknown(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { - _key + delete_User(docIDs: ["bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", + "_docID": "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5", }, }, }, diff --git a/tests/integration/mutation/delete/with_ids_txn_test.go b/tests/integration/mutation/delete/with_ids_txn_test.go index ab3ed174f1..c59ec5c262 100644 --- a/tests/integration/mutation/delete/with_ids_txn_test.go +++ b/tests/integration/mutation/delete/with_ids_txn_test.go @@ -20,7 +20,7 @@ import ( func TestMutationDeletion_WithIDsAndTxn(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple multi-key delete mutation with one key that exists and txn.", + Description: "Simple multi-docIDs delete mutation with one ID that exists and txn.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -43,21 +43,21 @@ func TestMutationDeletion_WithIDsAndTxn(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { - _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "_docID": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", }, }, }, testUtils.Request{ TransactionID: immutable.Some(0), Request: `query { - User(dockeys: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { - _key + User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d"]) { + _docID } }`, Results: []map[string]any{}, diff --git a/tests/integration/mutation/delete/with_ids_update_alias_test.go b/tests/integration/mutation/delete/with_ids_update_alias_test.go index a76dccffe2..076f4f9967 100644 --- a/tests/integration/mutation/delete/with_ids_update_alias_test.go +++ b/tests/integration/mutation/delete/with_ids_update_alias_test.go @@ -18,7 +18,7 @@ import ( func TestMutationDeletion_WithUpdateAndIDsAndSelectAlias(t *testing.T) { test := testUtils.TestCase{ - Description: "Delete multiple documents that exist, when given multiple keys with alias after update.", + Description: "Delete multiple documents 
that exist, when given multiple IDs with alias after update.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -56,16 +56,16 @@ func TestMutationDeletion_WithUpdateAndIDsAndSelectAlias(t *testing.T) { }, testUtils.Request{ Request: `mutation { - delete_User(ids: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { - AliasKey: _key + delete_User(docIDs: ["bae-6a6482a8-24e1-5c73-a237-ca569e41507d", "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e"]) { + AliasID: _docID } }`, Results: []map[string]any{ { - "AliasKey": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", + "AliasID": "bae-3a1a496e-24eb-5ae3-9c17-524c146a393e", }, { - "AliasKey": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", + "AliasID": "bae-6a6482a8-24e1-5c73-a237-ca569e41507d", }, }, }, diff --git a/tests/integration/mutation/mix/with_txn_test.go b/tests/integration/mutation/mix/with_txn_test.go index 8a88db606a..50cbee7809 100644 --- a/tests/integration/mutation/mix/with_txn_test.go +++ b/tests/integration/mutation/mix/with_txn_test.go @@ -34,25 +34,25 @@ func TestMutationWithTxnDeletesUserGivenSameTransaction(t *testing.T) { TransactionID: immutable.Some(0), Request: `mutation { create_User(data: "{\"name\": \"John\",\"age\": 27}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - delete_User(id: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { - _key + delete_User(docID: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, @@ -78,20 +78,20 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. TransactionID: immutable.Some(0), Request: `mutation { create_User(data: "{\"name\": \"John\",\"age\": 27}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, testUtils.Request{ TransactionID: immutable.Some(1), Request: `mutation { - delete_User(id: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { - _key + delete_User(docID: "bae-88b63198-7d38-5714-a9ff-21ba46374fd1") { + _docID } }`, Results: []map[string]any{}, @@ -100,16 +100,16 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. TransactionID: immutable.Some(0), Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(27), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(27), }, }, }, @@ -117,7 +117,7 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. 
TransactionID: immutable.Some(1), Request: `query { User { - _key + _docID name age } @@ -152,12 +152,12 @@ func TestMutationWithTxnDoesUpdateUserGivenSameTransactions(t *testing.T) { TransactionID: immutable.Some(0), Request: `mutation { update_User(data: "{\"age\": 28}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", }, }, }, @@ -165,16 +165,16 @@ func TestMutationWithTxnDoesUpdateUserGivenSameTransactions(t *testing.T) { TransactionID: immutable.Some(0), Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, @@ -206,16 +206,16 @@ func TestMutationWithTxnDoesNotUpdateUserGivenDifferentTransactions(t *testing.T TransactionID: immutable.Some(0), Request: `mutation { update_User(data: "{\"age\": 28}") { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, @@ -223,16 +223,16 @@ func TestMutationWithTxnDoesNotUpdateUserGivenDifferentTransactions(t *testing.T TransactionID: immutable.Some(1), Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(27), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(27), }, }, }, @@ -265,16 +265,16 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) TransactionID: immutable.Some(0), Request: `mutation { update_User(data: "{\"age\": 28}") { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, @@ -282,16 +282,16 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) TransactionID: immutable.Some(1), Request: `mutation { update_User(data: "{\"age\": 29}") { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(29), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(29), }, }, }, @@ -306,16 +306,16 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) // Query after transactions have been commited: Request: `query { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", - "name": "John", - "age": int64(28), + "_docID": "bae-88b63198-7d38-5714-a9ff-21ba46374fd1", + "name": "John", + "age": int64(28), }, }, }, diff --git a/tests/integration/mutation/special/invalid_operation_test.go b/tests/integration/mutation/special/invalid_operation_test.go index 4a8ae2f4c9..1694a37c67 100644 --- a/tests/integration/mutation/special/invalid_operation_test.go +++ b/tests/integration/mutation/special/invalid_operation_test.go @@ -30,7 +30,7 @@ func TestMutationInvalidMutation(t *testing.T) { testUtils.Request{ Request: `mutation { dostuff_User(data: "") { - _key + _docID } }`, ExpectedError: "Cannot query field 
\"dostuff_User\" on type \"Mutation\".", diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go index cb87d336f0..882fddd891 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go @@ -18,8 +18,8 @@ import ( ) func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from single side (wrong)", @@ -43,7 +43,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -54,7 +54,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing `{ "published_id": "%s" }`, - bookKey, + bookID, ), ExpectedError: "The given field does not exist. Name: published_id", }, @@ -67,8 +67,8 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side", @@ -86,7 +86,7 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -96,7 +96,7 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. `{ "author_id": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), }, testUtils.Request{ @@ -138,8 +138,8 @@ func TestMutationUpdateOneToMany_InvalidRelationIDToLinkFromManySide(t *testing. } func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side, with a wrong field.", @@ -163,7 +163,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -174,7 +174,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro "notName": "Unpainted Condo", "author_id": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. 
Name: notName", }, @@ -185,8 +185,8 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro } func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side", @@ -210,7 +210,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -220,7 +220,7 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySide(t *testing.T) { `{ "author_id": "%s" }`, - author2Key, + author2ID, ), }, testUtils.Request{ diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 46d4eb6f32..576b089d1c 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -20,8 +20,8 @@ import ( ) func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collection(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", @@ -52,7 +52,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -63,7 +63,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio `{ "published": "%s" }`, - bookKey, + bookID, ), ExpectedError: "The given field does not exist. Name: published", }, @@ -74,8 +74,8 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio } func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from single side (wrong)", @@ -105,7 +105,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -116,7 +116,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te `{ "published": "%s" }`, - bookKey, + bookID, ), ExpectedError: "The given field or alias to field does not exist. Name: published", }, @@ -129,8 +129,8 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_GQL(t *te // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. 
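The renamed fixture variables (`author1Key` to `author1ID`, and so on) hold hardcoded document IDs, which is safe to do because IDs are derived from document content. A sketch of deriving one at runtime instead, using only client calls that appear elsewhere in this series (`NewDocFromJSON`, `Doc.ID()`); the payload shown is illustrative, not a fixture from this patch:

```go
package example

import (
	"fmt"

	"github.com/sourcenetwork/defradb/client"
)

// relatedBookJSON builds a book payload by deriving the author's ID from
// its content, rather than hardcoding a "bae-..." literal.
func relatedBookJSON(authorJSON string) (string, error) {
	author, err := client.NewDocFromJSON([]byte(authorJSON))
	if err != nil {
		return "", err
	}
	return fmt.Sprintf(`{
		"name": "Painted House",
		"author_id": "%s"
	}`, author.ID()), nil
}
```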
func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", @@ -154,7 +154,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -164,7 +164,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), }, testUtils.Request{ @@ -212,8 +212,8 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( // TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL // and https://github.com/sourcenetwork/defradb/issues/1703 for more info. func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-35953ca-518d-9e6b-9ce6cd00eff5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", @@ -235,7 +235,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -245,7 +245,7 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), ExpectedError: "The given field does not exist. Name: author", }, @@ -256,8 +256,8 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll } func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side, with a wrong field.", @@ -287,7 +287,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -298,7 +298,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie "notName": "Unpainted Condo", "author": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. 
Name: notName", }, @@ -309,8 +309,8 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie } func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", @@ -340,7 +340,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing. "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -350,7 +350,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing. `{ "author": "%s" }`, - author2Key, + author2ID, ), }, testUtils.Request{ diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index 57633fd126..c68dcce5a3 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -20,8 +20,8 @@ import ( ) func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from single side", @@ -50,7 +50,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -60,7 +60,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin `{ "published": "%s" }`, - bookKey, + bookID, ), ExpectedError: "target document is already linked to another document.", }, @@ -71,8 +71,8 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin } func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", @@ -101,7 +101,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -111,7 +111,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test `{ "author": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "target document is already linked to another document.", }, @@ -122,9 +122,9 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test } func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" - invalidAuthorKey := "bae-" + invalidLenSubKey + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" 
+ invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidAuthorID := "bae-" + invalidLenSubID test := testUtils.TestCase{ Description: "One to one update mutation using invalid alias relation id", @@ -147,7 +147,7 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -157,9 +157,9 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", }, }, } @@ -168,8 +168,8 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * } func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", @@ -192,7 +192,7 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -202,9 +202,9 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ `{ "author": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -213,8 +213,8 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ } func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using relation alias name from secondary side, with a wrong field.", @@ -243,7 +243,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWron "name": "Painted House", "author": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -254,7 +254,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWron "notName": "Unpainted Condo", "author": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. Name: notName", }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index 2a4c93644d..5b0980baab 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -22,7 +22,7 @@ import ( // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. 
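The asserted error strings move off the old vocabulary as well: "no document for the given key exists" becomes "no document for the given ID exists", and the duplicate-create message drops "dockey" the same way. A minimal sketch of the updated assertion, assuming the harness's `UpdateDoc` action as used in these tests; the ID literal is the deliberately unknown one they use:

```go
package example

import testUtils "github.com/sourcenetwork/defradb/tests/integration"

// Linking to an ID that no document has should surface the new message.
var updateWithUnknownAuthor = testUtils.UpdateDoc{
	Doc: `{
		"author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee"
	}`,
	// Previously asserted: "no document for the given key exists"
	ExpectedError: "no document for the given ID exists",
}
```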
func TestMutationUpdateOneToOneNoChild(t *testing.T) { - unknownKey := "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" + unknownID := "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" test := testUtils.TestCase{ Description: "One to one create mutation, from the wrong side", @@ -40,7 +40,7 @@ func TestMutationUpdateOneToOneNoChild(t *testing.T) { `{ "published_id": "%s" }`, - unknownKey, + unknownID, ), }, testUtils.Request{ @@ -61,7 +61,7 @@ func TestMutationUpdateOneToOneNoChild(t *testing.T) { } func TestMutationUpdateOneToOne(t *testing.T) { - bookKey := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" + bookID := "bae-3d236f89-6a31-5add-a36a-27971a2eac76" test := testUtils.TestCase{ Description: "One to one update mutation", @@ -85,7 +85,7 @@ func TestMutationUpdateOneToOne(t *testing.T) { `{ "published_id": "%s" }`, - bookKey, + bookID, ), }, testUtils.Request{ @@ -133,7 +133,7 @@ func TestMutationUpdateOneToOne(t *testing.T) { } func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { - authorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + authorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" test := testUtils.TestCase{ Description: "One to one create mutation, from the secondary side", @@ -157,7 +157,7 @@ func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { `{ "author_id": "%s" }`, - authorKey, + authorID, ), }, testUtils.Request{ @@ -204,8 +204,8 @@ func TestMutationUpdateOneToOneSecondarySide(t *testing.T) { } func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - bookKey := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + bookID := "bae-22e0a1c2-d12b-5bfd-b039-0cf72f963991" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from single side (wrong)", @@ -229,7 +229,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -239,7 +239,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { `{ "published_id": "%s" }`, - bookKey, + bookID, ), ExpectedError: "target document is already linked to another document.", }, @@ -250,8 +250,8 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromPrimarySide(t *testing.T) { } func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side", @@ -275,7 +275,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -285,7 +285,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) `{ "author_id": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "target document is already linked to another document.", }, @@ -296,9 +296,9 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySide(t *testing.T) } func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidLenSubKey := "35953ca-518d-9e6b-9ce6cd00eff5" - 
invalidAuthorKey := "bae-" + invalidLenSubKey + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidLenSubID := "35953ca-518d-9e6b-9ce6cd00eff5" + invalidAuthorID := "bae-" + invalidLenSubID test := testUtils.TestCase{ Description: "One to one update mutation using invalid relation id", @@ -316,7 +316,7 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -326,9 +326,9 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T `{ "author_id": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubKey + "\"", + ExpectedError: "uuid: incorrect UUID length 30 in string \"" + invalidLenSubID + "\"", }, }, } @@ -337,8 +337,8 @@ func TestMutationUpdateOneToOne_InvalidLengthRelationIDToLink_Error(t *testing.T } func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - invalidAuthorKey := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + invalidAuthorID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ee" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side", @@ -356,7 +356,7 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -366,9 +366,9 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t `{ "author_id": "%s" }`, - invalidAuthorKey, + invalidAuthorID, ), - ExpectedError: "no document for the given key exists", + ExpectedError: "no document for the given ID exists", }, }, } @@ -377,8 +377,8 @@ func TestMutationUpdateOneToOne_InvalidRelationIDToLinkFromSecondarySide_Error(t } func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_Error(t *testing.T) { - author1Key := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" - author2Key := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" + author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" + author2ID := "bae-35953caf-4898-518d-9e6b-9ce6cd86ebe5" test := testUtils.TestCase{ Description: "One to one update mutation using relation id from secondary side, with a wrong field.", @@ -407,7 +407,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_ "name": "Painted House", "author_id": "%s" }`, - author1Key, + author1ID, ), }, testUtils.UpdateDoc{ @@ -418,7 +418,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_ "notName": "Unpainted Condo", "author_id": "%s" }`, - author2Key, + author2ID, ), ExpectedError: "The given field does not exist. 
Name: notName", }, diff --git a/tests/integration/mutation/update/underscored_schema_test.go b/tests/integration/mutation/update/underscored_schema_test.go index 7639458ae5..fa25f22d06 100644 --- a/tests/integration/mutation/update/underscored_schema_test.go +++ b/tests/integration/mutation/update/underscored_schema_test.go @@ -19,6 +19,7 @@ import ( func TestMutationUpdateUnderscoredSchema(t *testing.T) { test := testUtils.TestCase{ Description: "Simple update of schema with underscored name", + Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/mutation/update/with_delete_test.go b/tests/integration/mutation/update/with_delete_test.go index 444d16f87c..d949bbaa8c 100644 --- a/tests/integration/mutation/update/with_delete_test.go +++ b/tests/integration/mutation/update/with_delete_test.go @@ -46,7 +46,7 @@ func TestUpdateSave_DeletedDoc_DoesNothing(t *testing.T) { Doc: `{ "name": "Fred" }`, - ExpectedError: "a document with the given dockey has been deleted", + ExpectedError: "a document with the given ID has been deleted", }, }, } diff --git a/tests/integration/mutation/update/with_filter_test.go b/tests/integration/mutation/update/with_filter_test.go index 1b47ee6840..455ff99bbf 100644 --- a/tests/integration/mutation/update/with_filter_test.go +++ b/tests/integration/mutation/update/with_filter_test.go @@ -38,7 +38,7 @@ func TestMutationUpdate_WithBooleanFilter_ResultFilteredOut(t *testing.T) { // The update will result in a record that no longer matches the filter Request: `mutation { update_Users(filter: {verified: {_eq: true}}, data: "{\"verified\":false}") { - _key + _docID name verified } diff --git a/tests/integration/mutation/update/with_id_test.go b/tests/integration/mutation/update/with_id_test.go index 59b47bc234..ddc0fe7128 100644 --- a/tests/integration/mutation/update/with_id_test.go +++ b/tests/integration/mutation/update/with_id_test.go @@ -18,7 +18,7 @@ import ( func TestMutationUpdate_WithId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple update mutation with id", + Description: "Simple update mutation with document id", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -43,7 +43,7 @@ func TestMutationUpdate_WithId(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(id: "bae-cc36febf-4029-52b3-a876-c99c6293f588", data: "{\"points\": 59}") { + update_Users(docID: "bae-cc36febf-4029-52b3-a876-c99c6293f588", data: "{\"points\": 59}") { name points } @@ -63,7 +63,7 @@ func TestMutationUpdate_WithId(t *testing.T) { func TestMutationUpdate_WithNonExistantId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple update mutation with non existant id", + Description: "Simple update mutation with non existant document id", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -82,8 +82,8 @@ func TestMutationUpdate_WithNonExistantId(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(id: "bae-does-not-exist", data: "{\"points\": 59}") { - _key + update_Users(docID: "bae-does-not-exist", data: "{\"points\": 59}") { + _docID name points } diff --git a/tests/integration/mutation/update/with_ids_test.go b/tests/integration/mutation/update/with_ids_test.go index 6e8ff33dab..8d7a4aa6f0 100644 --- a/tests/integration/mutation/update/with_ids_test.go +++ b/tests/integration/mutation/update/with_ids_test.go @@ -51,7 +51,7 @@ func TestMutationUpdate_WithIds(t *testing.T) { testUtils.Request{ Request: `mutation { update_Users( - ids: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", 
"bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901"], + docIDs: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", "bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901"], data: "{\"points\": 59}" ) { name diff --git a/tests/integration/net/order/tcp_test.go b/tests/integration/net/order/tcp_test.go index a66856be3e..e33ca7c1e1 100644 --- a/tests/integration/net/order/tcp_test.go +++ b/tests/integration/net/order/tcp_test.go @@ -156,7 +156,7 @@ func TestP2FullPReplicator(t *testing.T) { }, ReplicatorResult: map[int]map[string]map[string]any{ 1: { - doc.Key().String(): { + doc.ID().String(): { "Age": int64(21), }, }, diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 84f41f98d6..09aa44bb13 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -63,13 +63,13 @@ type P2PTestCase struct { SeedDocuments []string DocumentsToReplicate []*client.Document - // node/dockey/values + // node/docID/values Updates map[int]map[int][]string Results map[int]map[int]map[string]any ReplicatorResult map[int]map[string]map[string]any } -func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node, []client.DocKey, error) { +func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node, []client.DocID, error) { ctx := context.Background() log.Info(ctx, "Building new memory store") @@ -83,11 +83,11 @@ func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node } // seed the database with a set of documents - dockeys := []client.DocKey{} + docIDs := []client.DocID{} for _, document := range seeds { - dockey, err := seedDocument(ctx, db, document) + docID, err := seedDocument(ctx, db, document) require.NoError(t, err) - dockeys = append(dockeys, dockey) + docIDs = append(docIDs, docID) } // init the P2P node @@ -120,7 +120,7 @@ func setupDefraNode(t *testing.T, cfg *config.Config, seeds []string) (*net.Node cfg.Net.P2PAddress = n.ListenAddrs()[0].String() - return n, dockeys, nil + return n, docIDs, nil } func seedSchema(ctx context.Context, db client.DB) error { @@ -128,23 +128,23 @@ func seedSchema(ctx context.Context, db client.DB) error { return err } -func seedDocument(ctx context.Context, db client.DB, document string) (client.DocKey, error) { +func seedDocument(ctx context.Context, db client.DB, document string) (client.DocID, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { - return client.DocKey{}, err + return client.DocID{}, err } doc, err := client.NewDocFromJSON([]byte(document)) if err != nil { - return client.DocKey{}, err + return client.DocID{}, err } err = col.Save(ctx, doc) if err != nil { - return client.DocKey{}, err + return client.DocID{}, err } - return doc.Key(), nil + return doc.ID(), nil } func saveDocument(ctx context.Context, db client.DB, document *client.Document) error { @@ -156,13 +156,13 @@ func saveDocument(ctx context.Context, db client.DB, document *client.Document) return col.Save(ctx, document) } -func updateDocument(ctx context.Context, db client.DB, dockey client.DocKey, update string) error { +func updateDocument(ctx context.Context, db client.DB, docID client.DocID, update string) error { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return err } - doc, err := getDocument(ctx, db, dockey) + doc, err := getDocument(ctx, db, docID) if err != nil { return err } @@ -174,13 +174,13 @@ func updateDocument(ctx context.Context, db client.DB, dockey client.DocKey, upd return col.Save(ctx, doc) } -func 
getDocument(ctx context.Context, db client.DB, dockey client.DocKey) (*client.Document, error) { +func getDocument(ctx context.Context, db client.DB, docID client.DocID) (*client.Document, error) { col, err := db.GetCollectionByName(ctx, userCollection) if err != nil { return nil, err } - doc, err := col.Get(ctx, dockey, false) + doc, err := col.Get(ctx, docID, false) if err != nil { return nil, err } @@ -190,7 +190,7 @@ func getDocument(ctx context.Context, db client.DB, dockey client.DocKey) (*clie func executeTestCase(t *testing.T, test P2PTestCase) { ctx := context.Background() - dockeys := []client.DocKey{} + docIDs := []client.DocID{} nodes := []*net.Node{} for i, cfg := range test.NodeConfig { @@ -215,7 +215,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { require.NoError(t, err) if i == 0 { - dockeys = d + docIDs = d } nodes = append(nodes, n) } @@ -249,7 +249,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for d, updates := range updateMap { for _, update := range updates { log.Info(ctx, fmt.Sprintf("Updating node %d with update %d", n, d)) - err := updateDocument(ctx, nodes[n].DB, dockeys[d], update) + err := updateDocument(ctx, nodes[n].DB, docIDs[d], update) require.NoError(t, err) // wait for peers to sync @@ -277,7 +277,7 @@ func executeTestCase(t *testing.T, test P2PTestCase) { for d, results := range resultsMap { for field, result := range results { - doc, err := getDocument(ctx, nodes[n2].DB, dockeys[d]) + doc, err := getDocument(ctx, nodes[n2].DB, docIDs[d]) require.NoError(t, err) val, err := doc.Get(field) @@ -318,9 +318,9 @@ func executeTestCase(t *testing.T, test P2PTestCase) { require.NoError(t, err) log.Info(ctx, fmt.Sprintf("Node %d synced", rep)) - for dockey, results := range test.ReplicatorResult[rep] { + for docID, results := range test.ReplicatorResult[rep] { for field, result := range results { - d, err := client.NewDocKeyFromString(dockey) + d, err := client.NewDocIDFromString(docID) require.NoError(t, err) doc, err := getDocument(ctx, nodes[rep].DB, d) diff --git a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go index 450902074a..8fd73fe06a 100644 --- a/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go +++ b/tests/integration/net/state/simple/peer/subscribe/with_add_get_test.go @@ -52,6 +52,8 @@ func TestP2PSubscribeAddGetMultiple(t *testing.T) { testUtils.RandomNetworkingConfig(), testUtils.RandomNetworkingConfig(), testUtils.SchemaUpdate{ + // Note: If a test is failing here in the error trace, you likely need to change the + // order of these schema types declared below (some renaming can cause this). Schema: ` type Users { name: String diff --git a/tests/integration/net/state/simple/replicator/with_create_test.go b/tests/integration/net/state/simple/replicator/with_create_test.go index f877457c9c..08433629c4 100644 --- a/tests/integration/net/state/simple/replicator/with_create_test.go +++ b/tests/integration/net/state/simple/replicator/with_create_test.go @@ -474,10 +474,10 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { testUtils.WaitForSync{}, testUtils.Request{ // The document should have been synced, and should contain the same values - // including dockey and schema version id. + // including document id and schema version id. 
Request: `query { Users { - _key + _docID age name _version { @@ -487,12 +487,12 @@ func TestP2POneToOneReplicatorOrderIndependent(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - "age": int64(21), - "name": "John", + "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "age": int64(21), + "name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", + "schemaVersionId": "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", }, }, }, @@ -537,11 +537,11 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { }`, }, testUtils.Request{ - // Assert that the dockey and schema version id are the same across all nodes, + // Assert that the document id and schema version id are the same across all nodes, // even though the schema field order is different. Request: `query { Users { - _key + _docID _version { schemaVersionId } @@ -549,10 +549,10 @@ func TestP2POneToOneReplicatorOrderIndependentDirectCreate(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "_docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", "_version": []map[string]any{ { - "schemaVersionId": "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", + "schemaVersionId": "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", }, }, }, diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index 1ee63bcedd..a0dd120ec4 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -36,13 +36,13 @@ func TestQueryCommits(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -79,22 +79,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiftg4c3aioppm2mn5f7wuqynbezricqdzpvspkd74jm7lq2jrst6m", + "cid": "bafybeifnoeodhrvpimwnuwcxmz2fxci6cwrw5ck5vo5n6rkkdt47hepyhm", }, { - "cid": "bafybeielma57bnbv5oizjsv7szhu6jq45rxfcdof62opaygyyqp2j7qd5e", + "cid": "bafybeihx6t43wc23xzak7raultfzpvnetrsi7vhzglray3r7k4gdksbuk4", }, { - "cid": "bafybeigvf4bcuc53dphwniloxt3kqqoersoghdprxsjkb6xqq7wup34usy", + "cid": "bafybeicvpe4oyfrgcuhf2eqqgp2iwuifgl73d6jo4pdlg3x3vqmnusgxv4", }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -125,16 +125,16 @@ func TestQueryCommitsWithSchemaVersionIdField(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": 
"bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, }, @@ -285,6 +285,8 @@ func TestQueryCommitsWithFieldIDFieldWithUpdate(t *testing.T) { testUtils.Request{ Request: ` query { + + commits { fieldId } diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index 46d767620e..d34b5c7f0f 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,14 +38,14 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq" + cid: "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, }, }, @@ -56,7 +56,7 @@ func TestQueryCommitsWithCid(t *testing.T) { } func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { - // cid is for a field commit, see TestQueryCommitsWithDockeyAndFieldId + // cid is for a field commit, see TestQueryCommitsWithDocIDAndFieldId test := testUtils.TestCase{ Description: "Simple all commits query with cid", Actions: []any{ @@ -71,14 +71,14 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq" + cid: "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, }, }, diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index f3bc9bc35c..8977a84bbb 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -36,13 +36,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -81,16 +81,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { Results: []map[string]any{ { // "Age" field head - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", 
"height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, }, @@ -137,27 +137,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { Results: []map[string]any{ { // Composite head - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", "height": int64(3), }, { // Composite head -1 - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { // "Age" field head - "cid": "bafybeidwgrk2xyu25pmwvpkfs4hnswtgej6gopkf26jrgm6lpbofa3rs3e", + "cid": "bafybeieirgdstog2griwuuxgb4c3frgka55yoodjwdznraoieqcxfdijw4", "height": int64(3), }, { // "Age" field head -1 - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, }, @@ -195,22 +195,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeihayvvwwsjvd3yefenc4ubebriluyg4rdzxmizrhefk4agotcqlp4", + "cid": "bafybeiasu5mdp6652oux4avwugv6gbd6ciqqsuj2zjv4ypksmiwndgwkeq", }, { - "cid": "bafybeiezcqlaqvozdw3ogdf2dxukwrf5m3xydd7lyy6ylcqycx5uqqepfm", + "cid": "bafybeia7shc4tpafpzblxqjyxmb7fayegsvaol3p2ucujaawig3wtopibu", }, { - "cid": "bafybeicr2lalkqj6weqcafm32posw22hjmybwohau57eswg5a442qilc2q", + "cid": "bafybeifwn57hy5m5rddplfxdomes34ykck775yvinc522nowspkvawqr6q", }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go similarity index 72% rename from tests/integration/query/commits/with_dockey_cid_test.go rename to tests/integration/query/commits/with_doc_id_cid_test.go index be1c3bf580..2003158565 100644 --- a/tests/integration/query/commits/with_dockey_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndCidForDifferentDoc(t *testing.T) { +func TestQueryCommitsWithDocIDAndCidForDifferentDoc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and cid", + Description: "Simple all commits query with docID and cid, for different doc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -31,7 +31,7 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDoc(t *testing.T) { testUtils.Request{ Request: ` { commits( - 
dockey: "bae-not-this-doc", + docID: "bae-not-this-doc", cid: "bafybeica4js2abwqjjrz7dcialbortbz32uxp7ufxu7yljbwvmhjqqxzny" ) { cid @@ -45,9 +45,9 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDoc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndCidForDifferentDocWithUpdate(t *testing.T) { +func TestQueryCommitsWithDocIDAndCidForDifferentDocWithUpdate(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and cid", + Description: "Simple all commits query with docID and cid, for different doc with update", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -67,7 +67,7 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDocWithUpdate(t *testing.T) { testUtils.Request{ Request: ` { commits( - dockey: "bae-not-this-doc", + docID: "bae-not-this-doc", cid: "bafybeica4js2abwqjjrz7dcialbortbz32uxp7ufxu7yljbwvmhjqqxzny" ) { cid @@ -81,9 +81,9 @@ func TestQueryCommitsWithDockeyAndCidForDifferentDocWithUpdate(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndCid(t *testing.T) { +func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and cid", + Description: "Simple all commits query with docID and cid, with update", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -103,15 +103,15 @@ func TestQueryCommitsWithDockeyAndCid(t *testing.T) { testUtils.Request{ Request: ` { commits( - dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - cid: "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm" + docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + cid: "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go similarity index 68% rename from tests/integration/query/commits/with_dockey_count_test.go rename to tests/integration/query/commits/with_doc_id_count_test.go index 7a61fa5fef..ba5b0eb589 100644 --- a/tests/integration/query/commits/with_dockey_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndLinkCount(t *testing.T) { +func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple latest commits query with dockey and link count", + Description: "Simple latest commits query with docID and link count", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,22 +30,22 @@ func TestQueryCommitsWithDockeyAndLinkCount(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid _count(field: links) } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "_count": 0, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "_count": 0, }, { - "cid": 
"bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_dockey_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go similarity index 66% rename from tests/integration/query/commits/with_dockey_field_test.go rename to tests/integration/query/commits/with_doc_id_field_test.go index b588300fb6..87b6edb06c 100644 --- a/tests/integration/query/commits/with_dockey_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndUnknownField(t *testing.T) { +func TestQueryCommitsWithDocIDAndUnknownField(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and unknown field", + Description: "Simple all commits query with docID and unknown field", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,7 +30,7 @@ func TestQueryCommitsWithDockeyAndUnknownField(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "not a field") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "not a field") { cid } }`, @@ -42,9 +42,9 @@ func TestQueryCommitsWithDockeyAndUnknownField(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndUnknownFieldId(t *testing.T) { +func TestQueryCommitsWithDocIDAndUnknownFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and unknown field id", + Description: "Simple all commits query with docID and unknown field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -56,7 +56,7 @@ func TestQueryCommitsWithDockeyAndUnknownFieldId(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "999999") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "999999") { cid } }`, @@ -69,10 +69,10 @@ func TestQueryCommitsWithDockeyAndUnknownFieldId(t *testing.T) { } // This test is for documentation reasons only. This is not -// desired behaviour (should return all commits for dockey-field). -func TestQueryCommitsWithDockeyAndField(t *testing.T) { +// desired behaviour (should return all commits for docID-field). +func TestQueryCommitsWithDocIDAndField(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field", + Description: "Simple all commits query with docID and field", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -84,7 +84,7 @@ func TestQueryCommitsWithDockeyAndField(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "Age") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "Age") { cid } }`, @@ -98,9 +98,9 @@ func TestQueryCommitsWithDockeyAndField(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). 
-func TestQueryCommitsWithDockeyAndFieldId(t *testing.T) { +func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -112,13 +112,13 @@ func TestQueryCommitsWithDockeyAndFieldId(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { cid } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, }, }, @@ -130,9 +130,9 @@ func TestQueryCommitsWithDockeyAndFieldId(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). -func TestQueryCommitsWithDockeyAndCompositeFieldId(t *testing.T) { +func TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -144,13 +144,13 @@ func TestQueryCommitsWithDockeyAndCompositeFieldId(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { cid } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_group_order_test.go b/tests/integration/query/commits/with_doc_id_group_order_test.go similarity index 79% rename from tests/integration/query/commits/with_dockey_group_order_test.go rename to tests/integration/query/commits/with_doc_id_group_order_test.go index d29a3683ea..d858bcf819 100644 --- a/tests/integration/query/commits/with_dockey_group_order_test.go +++ b/tests/integration/query/commits/with_doc_id_group_order_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsOrderedAndGroupedByDocKey(t *testing.T) { +func TestQueryCommitsOrderedAndGroupedByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query, grouped and ordered by dockey", + Description: "Simple all commits query, grouped and ordered by docID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -37,16 +37,16 @@ func TestQueryCommitsOrderedAndGroupedByDocKey(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(groupBy: [dockey], order: {dockey: DESC}) { - dockey + commits(groupBy: [docID], order: {docID: DESC}) { + docID } }`, Results: []map[string]any{ { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { - "dockey": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", + "docID": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go similarity index 75% rename from 
tests/integration/query/commits/with_dockey_limit_offset_test.go rename to tests/integration/query/commits/with_doc_id_limit_offset_test.go index a8c6665bca..e6a622aa3c 100644 --- a/tests/integration/query/commits/with_dockey_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndLimitAndOffset(t *testing.T) { +func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, limit and offset", + Description: "Simple all commits query with docID, limit and offset", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -51,16 +51,16 @@ func TestQueryCommitsWithDockeyAndLimitAndOffset(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2, offset: 1) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2, offset: 1) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go similarity index 75% rename from tests/integration/query/commits/with_dockey_limit_test.go rename to tests/integration/query/commits/with_doc_id_limit_test.go index b9f8e51f8b..23b045b708 100644 --- a/tests/integration/query/commits/with_dockey_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndLimit(t *testing.T) { +func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and limit", + Description: "Simple all commits query with docID and limit", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -44,16 +44,16 @@ func TestQueryCommitsWithDockeyAndLimit(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", limit: 2) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go similarity index 74% rename from tests/integration/query/commits/with_dockey_order_limit_offset_test.go rename to tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 195e2b3a8e..118262d5c9 100644 --- a/tests/integration/query/commits/with_dockey_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func 
TestQueryCommitsWithDockeyAndOrderAndLimitAndOffset(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order, limit and offset", + Description: "Simple all commits query with docID, order, limit and offset", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -51,18 +51,18 @@ func TestQueryCommitsWithDockeyAndOrderAndLimitAndOffset(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}, limit: 2, offset: 4) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}, limit: 2, offset: 4) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_dockey_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go similarity index 55% rename from tests/integration/query/commits/with_dockey_order_test.go rename to tests/integration/query/commits/with_doc_id_order_test.go index 2b4e8f6156..47f0ce3802 100644 --- a/tests/integration/query/commits/with_dockey_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyAndOrderHeightDesc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order height desc", + Description: "Simple all commits query with docID, order height desc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -37,30 +37,30 @@ func TestQueryCommitsWithDockeyAndOrderHeightDesc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: DESC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: DESC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, }, @@ -71,9 +71,9 @@ func TestQueryCommitsWithDockeyAndOrderHeightDesc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderHeightAsc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits 
query with dockey, order height asc", + Description: "Simple all commits query with docID, order height asc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -92,30 +92,30 @@ func TestQueryCommitsWithDockeyAndOrderHeightAsc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, }, @@ -126,9 +126,9 @@ func TestQueryCommitsWithDockeyAndOrderHeightAsc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderCidDesc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order cid desc", + Description: "Simple all commits query with docID, order cid desc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -147,30 +147,30 @@ func TestQueryCommitsWithDockeyAndOrderCidDesc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: DESC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: DESC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", - "height": int64(2), + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "height": int64(1), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", - "height": int64(1), + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "height": int64(2), }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, }, @@ -181,9 +181,9 @@ func TestQueryCommitsWithDockeyAndOrderCidDesc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderCidAsc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, order cid asc", + Description: "Simple all commits query with docID, order cid asc", Actions: []any{ updateUserCollectionSchema(), 
testUtils.CreateDoc{ @@ -202,30 +202,30 @@ func TestQueryCommitsWithDockeyAndOrderCidAsc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: ASC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {cid: ASC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", - "height": int64(1), + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "height": int64(2), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", - "height": int64(2), + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, }, @@ -236,9 +236,9 @@ func TestQueryCommitsWithDockeyAndOrderCidAsc(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndOrderAndMultiUpdatesCidAsc(t *testing.T) { +func TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, multiple updates with order cid asc", + Description: "Simple all commits query with docID, multiple updates with order cid asc", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -271,46 +271,46 @@ func TestQueryCommitsWithDockeyAndOrderAndMultiUpdatesCidAsc(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", order: {height: ASC}) { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeietneua73vfrkvfefw5rmju7yhee6rywychdbls5xqmqtqmzfckzq", + "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", "height": int64(3), }, { - "cid": "bafybeidwgrk2xyu25pmwvpkfs4hnswtgej6gopkf26jrgm6lpbofa3rs3e", + "cid": "bafybeieirgdstog2griwuuxgb4c3frgka55yoodjwdznraoieqcxfdijw4", "height": int64(3), }, { - "cid": "bafybeiahvakoy5joy563em7hlzvqcarxqdp2nin4gnxythoj4fqjh7umzu", + "cid": "bafybeidoph22zh2c4kh2tx5qbg62nbrulvald6w5hgvp5x5rjurdbz3ibi", "height": int64(4), }, { - "cid": 
"bafybeighft6vokgntjvpirwdt233xizmnhxtawiqeahwypxv7u26dwseoe", + "cid": "bafybeiacs2yvfbjgk3xfz5zgt43gswo4jhreieenwkb4whpstjas5cpbdy", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_dockey_prop_test.go b/tests/integration/query/commits/with_doc_id_prop_test.go similarity index 74% rename from tests/integration/query/commits/with_dockey_prop_test.go rename to tests/integration/query/commits/with_doc_id_prop_test.go index daf21ba1c7..6404114217 100644 --- a/tests/integration/query/commits/with_dockey_prop_test.go +++ b/tests/integration/query/commits/with_doc_id_prop_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyProperty(t *testing.T) { +func TestQueryCommitsWithDocIDProperty(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple commits query with dockey property", + Description: "Simple commits query with docID property", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -31,18 +31,18 @@ func TestQueryCommitsWithDockeyProperty(t *testing.T) { testUtils.Request{ Request: `query { commits { - dockey + docID } }`, Results: []map[string]any{ { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, }, }, diff --git a/tests/integration/query/commits/with_dockey_test.go b/tests/integration/query/commits/with_doc_id_test.go similarity index 58% rename from tests/integration/query/commits/with_dockey_test.go rename to tests/integration/query/commits/with_doc_id_test.go index 9dde4bc41a..b69f278c10 100644 --- a/tests/integration/query/commits/with_dockey_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithUnknownDockey(t *testing.T) { +func TestQueryCommitsWithUnknownDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with unknown dockey", + Description: "Simple all commits query with unknown document ID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,7 +30,7 @@ func TestQueryCommitsWithUnknownDockey(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "unknown dockey") { + commits(docID: "unknown document ID") { cid } }`, @@ -42,9 +42,9 @@ func TestQueryCommitsWithUnknownDockey(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockey(t *testing.T) { +func TestQueryCommitsWithDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey", + Description: "Simple all commits query with docID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -56,19 +56,19 @@ func TestQueryCommitsWithDockey(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": 
"bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -78,9 +78,9 @@ func TestQueryCommitsWithDockey(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { +func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, with links", + Description: "Simple all commits query with docID, with links", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -92,7 +92,7 @@ func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid links { cid @@ -102,22 +102,22 @@ func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "links": []map[string]any{}, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "links": []map[string]any{}, }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, @@ -130,9 +130,9 @@ func TestQueryCommitsWithDockeyAndLinks(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithDockeyAndUpdate(t *testing.T) { +func TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, multiple results", + Description: "Simple all commits query with docID, multiple results", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -151,30 +151,30 @@ func TestQueryCommitsWithDockeyAndUpdate(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid height } }`, Results: []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "height": int64(2), }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "height": int64(1), }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "height": int64(1), }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "height": int64(2), }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "height": int64(1), }, }, @@ -188,9 +188,9 @@ func 
TestQueryCommitsWithDockeyAndUpdate(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (first results includes link._head, second // includes link._Name). -func TestQueryCommitsWithDockeyAndUpdateAndLinks(t *testing.T) { +func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey, multiple results and links", + Description: "Simple all commits query with docID, multiple results and links", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -209,7 +209,7 @@ func TestQueryCommitsWithDockeyAndUpdateAndLinks(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid links { cid @@ -219,44 +219,44 @@ func TestQueryCommitsWithDockeyAndUpdateAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "_head", }, }, }, { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "links": []map[string]any{}, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "links": []map[string]any{}, }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", "links": []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "name": "_head", }, { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", "name": "age", }, }, }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_dockey_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go similarity index 69% rename from tests/integration/query/commits/with_dockey_typename_test.go rename to tests/integration/query/commits/with_doc_id_typename_test.go index f8573785f4..4c360c297e 100644 --- a/tests/integration/query/commits/with_dockey_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryCommitsWithDockeyWithTypeName(t *testing.T) { +func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and typename", + Description: "Simple all commits query with docID and typename", Actions: 
[]any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,22 +30,22 @@ func TestQueryCommitsWithDockeyWithTypeName(t *testing.T) { }, testUtils.Request{ Request: `query { - commits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + commits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid __typename } }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "__typename": "Commit", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "__typename": "Commit", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index 008dc871d4..f8cd4e961f 100644 --- a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -17,7 +17,7 @@ import ( ) // This test is for documentation reasons only. This is not -// desired behaviour (should return all commits for dockey-field). +// desired behaviour (should return all commits for docID-field). func TestQueryCommitsWithField(t *testing.T) { test := testUtils.TestCase{ Description: "Simple all commits query with field", @@ -66,7 +66,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, }, }, @@ -80,7 +80,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { // desired behaviour (Users should not be specifying field ids). func TestQueryCommitsWithCompositeFieldId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -98,7 +98,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -112,7 +112,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { // desired behaviour (Users should not be specifying field ids). func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query with dockey and field id", + Description: "Simple all commits query with docID and field id", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -131,8 +131,8 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing. 
}`, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index d031d70540..64439c97e1 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -89,10 +89,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafybeift3qzwhklfpkgszvrmbfzb6zp3g3cqryhjkuaoz3kp2yrj763jce", + "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", }, { - "cid": "bafybeige35bkafoez4cf4v6hgdkm5iaqcuqfq4bkyt7fxeycbdnqtbr7g4", + "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", }, }, }, @@ -100,13 +100,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", }, }, }, @@ -142,7 +142,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "_group": []map[string]any{ { "height": int64(1), @@ -150,7 +150,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "_group": []map[string]any{ { "height": int64(1), @@ -158,7 +158,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "_group": []map[string]any{ { "height": int64(1), @@ -173,9 +173,9 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestQueryCommitsWithGroupByDocKey(t *testing.T) { +func TestQueryCommitsWithGroupByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple all commits query, group by dockey", + Description: "Simple all commits query, group by document ID", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -208,16 +208,16 @@ func TestQueryCommitsWithGroupByDocKey(t *testing.T) { }, testUtils.Request{ Request: ` { - commits(groupBy: [dockey]) { - dockey + commits(groupBy: [docID]) { + docID } }`, Results: []map[string]any{ { - "dockey": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", + "docID": "bae-72f3dc53-1846-55d5-915c-28c4e83cc891", }, { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, }, }, diff --git a/tests/integration/query/latest_commits/simple_test.go b/tests/integration/query/latest_commits/simple_test.go 
index 7a6e28f6d8..e31ee22da8 100644 --- a/tests/integration/query/latest_commits/simple_test.go +++ b/tests/integration/query/latest_commits/simple_test.go @@ -38,7 +38,7 @@ func TestQueryLatestCommits(t *testing.T) { }`, }, }, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", } executeTestCase(t, test) diff --git a/tests/integration/query/latest_commits/with_collectionid_prop_test.go b/tests/integration/query/latest_commits/with_collectionid_prop_test.go index afdd6ae7ee..78ffab9b3c 100644 --- a/tests/integration/query/latest_commits/with_collectionid_prop_test.go +++ b/tests/integration/query/latest_commits/with_collectionid_prop_test.go @@ -37,7 +37,7 @@ func TestQueryLastCommitsWithCollectionIdProperty(t *testing.T) { }, testUtils.Request{ Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { collectionID } }`, @@ -49,7 +49,7 @@ func TestQueryLastCommitsWithCollectionIdProperty(t *testing.T) { }, testUtils.Request{ Request: `query { - latestCommits(dockey: "bae-de8c99bf-ee0e-5655-8a72-919c2d459a30") { + latestCommits(docID: "bae-de8c99bf-ee0e-5655-8a72-919c2d459a30") { collectionID } }`, diff --git a/tests/integration/query/latest_commits/with_dockey_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go similarity index 65% rename from tests/integration/query/latest_commits/with_dockey_field_test.go rename to tests/integration/query/latest_commits/with_doc_id_field_test.go index dce5da651f..d320aff1a8 100644 --- a/tests/integration/query/latest_commits/with_dockey_field_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -18,11 +18,11 @@ import ( // This test is for documentation reasons only. This is not // desired behaviour (it looks totally broken to me). -func TestQueryLatestCommitsWithDocKeyAndFieldName(t *testing.T) { +func TestQueryLatestCommitsWithDocIDAndFieldName(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and field name", + Description: "Simple latest commits query with docID and field name", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "age") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "age") { cid links { cid @@ -46,11 +46,11 @@ func TestQueryLatestCommitsWithDocKeyAndFieldName(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). 
-func TestQueryLatestCommitsWithDocKeyAndFieldId(t *testing.T) { +func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and field id", + Description: "Simple latest commits query with docID and field id", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "1") { cid links { cid @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithDocKeyAndFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "links": []map[string]any{}, }, }, @@ -79,11 +79,11 @@ func TestQueryLatestCommitsWithDocKeyAndFieldId(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (Users should not be specifying field ids). -func TestQueryLatestCommitsWithDocKeyAndCompositeFieldId(t *testing.T) { +func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and composite field id", + Description: "Simple latest commits query with docID and composite field id", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", fieldId: "C") { cid links { cid @@ -101,14 +101,14 @@ func TestQueryLatestCommitsWithDocKeyAndCompositeFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, diff --git a/tests/integration/query/latest_commits/with_dockey_prop_test.go b/tests/integration/query/latest_commits/with_doc_id_prop_test.go similarity index 75% rename from tests/integration/query/latest_commits/with_dockey_prop_test.go rename to tests/integration/query/latest_commits/with_doc_id_prop_test.go index b7ffd80d65..247d536532 100644 --- a/tests/integration/query/latest_commits/with_dockey_prop_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_prop_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryLastCommitsWithDockeyProperty(t *testing.T) { +func TestQueryLastCommitsWithDocIDProperty(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple latest commits query with dockey property", + Description: "Simple latest commits query with docID property", Actions: []any{ updateUserCollectionSchema(), testUtils.CreateDoc{ @@ -30,13 +30,13 @@ func TestQueryLastCommitsWithDockeyProperty(t *testing.T) { }, testUtils.Request{ Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { - dockey + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + docID } }`, Results: []map[string]any{ { - "dockey": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", + "docID": "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", }, }, }, diff --git 
a/tests/integration/query/latest_commits/with_dockey_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go similarity index 58% rename from tests/integration/query/latest_commits/with_dockey_test.go rename to tests/integration/query/latest_commits/with_doc_id_test.go index e07d34836f..55e0546cdf 100644 --- a/tests/integration/query/latest_commits/with_dockey_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_test.go @@ -16,11 +16,11 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryLatestCommitsWithDocKey(t *testing.T) { +func TestQueryLatestCommitsWithDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey", + Description: "Simple latest commits query with docID", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid links { cid @@ -38,14 +38,14 @@ func TestQueryLatestCommitsWithDocKey(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", "links": []map[string]any{ { - "cid": "bafybeiazsz3twea2uxpen6452qqa7qnzp2xildfxliidhqk632jpvbixkm", + "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", "name": "age", }, { - "cid": "bafybeidzukbs36cwwhab4rkpi6jfhhxse2vjtc5tf767qda5valcinilmy", + "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", "name": "name", }, }, @@ -56,11 +56,11 @@ func TestQueryLatestCommitsWithDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQueryLatestCommitsWithDocKeyWithSchemaVersionIdField(t *testing.T) { +func TestQueryLatestCommitsWithDocIDWithSchemaVersionIdField(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple latest commits query with dockey and schema versiion id field", + Description: "Simple latest commits query with docID and schema version id field", Request: `query { - latestCommits(dockey: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { + latestCommits(docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7") { cid schemaVersionId } @@ -75,8 +75,8 @@ func TestQueryLatestCommitsWithDocKeyWithSchemaVersionIdField(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeihbcl2ijavd6vdcj4vgunw4q5qt5itmumxw7iy7fhoqfsuvkpkqeq", - "schemaVersionId": "bafkreictcre4pylafzzoh5lpgbetdodunz4r6pz3ormdzzpsz2lqtp4v34", + "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, } diff --git a/tests/integration/query/latest_commits/with_field_test.go b/tests/integration/query/latest_commits/with_field_test.go index 70c4635cd4..67ae607c47 100644 --- a/tests/integration/query/latest_commits/with_field_test.go +++ b/tests/integration/query/latest_commits/with_field_test.go @@ -39,7 +39,7 @@ func TestQueryLatestCommitsWithField(t *testing.T) { }`, }, }, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" of type \"ID!\" is required but not provided.", } executeTestCase(t, test) @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithFieldId(t *testing.T) { }`, }, }, - ExpectedError: "Field \"latestCommits\" argument \"dockey\" of type \"ID!\" is required but not provided.", + ExpectedError: "Field \"latestCommits\" argument \"docID\" 
of type \"ID!\" is required but not provided.", } executeTestCase(t, test) diff --git a/tests/integration/query/one_to_many/with_cid_dockey_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go similarity index 81% rename from tests/integration/query/one_to_many/with_cid_dockey_test.go rename to tests/integration/query/one_to_many/with_cid_doc_id_test.go index be7589c707..56c324802f 100644 --- a/tests/integration/query/one_to_many/with_cid_dockey_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -18,13 +18,13 @@ import ( // This test is for documentation reasons only. This is not // desired behaviour (should just return empty). -// func TestQueryOneToManyWithUnknownCidAndDocKey(t *testing.T) { +// func TestQueryOneToManyWithUnknownCidAndDocID(t *testing.T) { // test := testUtils.RequestTestCase{ -// Description: "One-to-many relation query from one side with unknown cid and dockey", +// Description: "One-to-many relation query from one side with unknown cid and docID", // Request: `query { // Book ( // cid: "bafybeicgwjdyqyuntdop5ytpsfrqg5a4t2r25pfv6prfppl5ta5k5altca", -// dockey: "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" +// docID: "bae-fd541c25-229e-5280-b44b-e5c2af3e374d" // ) { // name // author { @@ -63,13 +63,13 @@ import ( // testUtils.AssertPanic(t, func() { executeTestCase(t, test) }) // } -func TestQueryOneToManyWithCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with cid and dockey", + Description: "One-to-many relation query from one side with cid and docID", Request: `query { Book ( - cid: "bafybeigq7vjp6btvgms2k6ajgtcvygv4bvejk5pin44jbib43rwqa2j64q" - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name author { @@ -112,13 +112,13 @@ func TestQueryOneToManyWithCidAndDocKey(t *testing.T) { // desired behaviour (no way to get state of child a time of // parent creation without explicit child cid, which is also not tied // to parent state). 
-func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with child update and parent cid and dockey", + Description: "One-to-many relation query from one side with child update and parent cid and docID", Request: `query { Book ( - cid: "bafybeigq7vjp6btvgms2k6ajgtcvygv4bvejk5pin44jbib43rwqa2j64q", - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name author { @@ -168,21 +168,22 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with parent update and parent cid and dockey", + Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { - Book ( - cid: "bafybeigq7vjp6btvgms2k6ajgtcvygv4bvejk5pin44jbib43rwqa2j64q", - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" - ) { + Book ( + cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + ) { + name + rating + author { name - rating - author { - name - } } - }`, + } + }`, + Docs: map[int][]string{ //books 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d @@ -224,13 +225,13 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQueryOneToManyWithParentUpdateAndLastCidAndDocKey(t *testing.T) { +func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with parent update and parent cid and dockey", + Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { Book ( - cid: "bafybeigukwqfzjxvuaok53gradxpvz7ag6l73b77lpjdcfglizmnv6zurq", - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + cid: "bafybeifnz3yz3rkd2bc2uv6i7ucfdlqji5wevs5anziwpr76ia45ygtbk4", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name rating diff --git a/tests/integration/query/one_to_many/with_dockey_test.go b/tests/integration/query/one_to_many/with_doc_id_test.go similarity index 91% rename from tests/integration/query/one_to_many/with_dockey_test.go rename to tests/integration/query/one_to_many/with_doc_id_test.go index fd75677c1c..34021f53ad 100644 --- a/tests/integration/query/one_to_many/with_dockey_test.go +++ b/tests/integration/query/one_to_many/with_doc_id_test.go @@ -16,14 +16,14 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryOneToManyWithChildDocKey(t *testing.T) { +func TestQueryOneToManyWithChildDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with child dockey", + Description: "One-to-many relation query from one side with child docID", Request: `query { Author { name published ( - dockey: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name } diff --git a/tests/integration/query/one_to_many/with_dockeys_test.go 
b/tests/integration/query/one_to_many/with_doc_ids_test.go similarity index 90% rename from tests/integration/query/one_to_many/with_dockeys_test.go rename to tests/integration/query/one_to_many/with_doc_ids_test.go index 1c58e5947f..821a24c334 100644 --- a/tests/integration/query/one_to_many/with_dockeys_test.go +++ b/tests/integration/query/one_to_many/with_doc_ids_test.go @@ -16,14 +16,14 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQueryOneToManyWithChildDocKeys(t *testing.T) { +func TestQueryOneToManyWithChildDocIDs(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with child dockeys", + Description: "One-to-many relation query from one side with child docIDs", Request: `query { Author { name published ( - dockeys: ["bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca"] + docIDs: ["bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca"] ) { name } diff --git a/tests/integration/query/one_to_many/with_filter_related_id_test.go b/tests/integration/query/one_to_many/with_filter_related_id_test.go index 87c895e5c1..98c3af4b59 100644 --- a/tests/integration/query/one_to_many/with_filter_related_id_test.go +++ b/tests/integration/query/one_to_many/with_filter_related_id_test.go @@ -22,7 +22,7 @@ func TestQueryFromManySideWithEqFilterOnRelatedType(t *testing.T) { Description: "One-to-many query from many side with _eq filter on related field type.", Request: `query { - Book(filter: {author: {_key: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}}) { + Book(filter: {author: {_docID: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}}) { name } }`, @@ -178,7 +178,7 @@ func TestQueryFromManySideWithSameFiltersInDifferentWayOnRelatedType(t *testing. 
Request: `query { Book( filter: { - author: {_key: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}, + author: {_docID: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"}}, author_id: {_eq: "bae-41598f0c-19bc-5da6-813b-e80f14a10df3"} } ) { @@ -258,7 +258,7 @@ func TestQueryFromSingleSideWithEqFilterOnRelatedType(t *testing.T) { Description: "One-to-many query from single side with _eq filter on related field type.", Request: `query { - Author(filter: {published: {_key: {_eq: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d"}}}) { + Author(filter: {published: {_docID: {_eq: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d"}}}) { name } }`, diff --git a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go index 7c813d9359..9f17d2ffe7 100644 --- a/tests/integration/query/one_to_many/with_group_related_id_alias_test.go +++ b/tests/integration/query/one_to_many/with_group_related_id_alias_test.go @@ -170,7 +170,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe Request: `query { Book(groupBy: [author]) { author { - _key + _docID name } _group { @@ -243,8 +243,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe Results: []map[string]any{ { "author": map[string]any{ - "name": "Voltaire", - "_key": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", + "name": "Voltaire", + "_docID": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", }, "_group": []map[string]any{ { @@ -267,8 +267,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, { "author": map[string]any{ - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", }, "_group": []map[string]any{ { @@ -299,8 +299,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeFromManySideUsingAliasAndRe }, { "author": map[string]any{ - "name": "Simon Pelloutier", - "_key": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", + "name": "Simon Pelloutier", + "_docID": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", }, "_group": []map[string]any{ { @@ -473,7 +473,7 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide Book(groupBy: [author]) { author_id author { - _key + _docID name } _group { @@ -547,8 +547,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide { "author_id": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", "author": map[string]any{ - "name": "Voltaire", - "_key": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", + "name": "Voltaire", + "_docID": "bae-7accaba8-ea9d-54b1-92f4-4a7ac5de88b3", }, "_group": []map[string]any{ { @@ -572,8 +572,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide { "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "author": map[string]any{ - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", }, "_group": []map[string]any{ { @@ -605,8 +605,8 @@ func TestQueryOneToManyWithParentGroupByOnRelatedTypeWithIDSelectionFromManySide { "author_id": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", "author": map[string]any{ - "name": "Simon Pelloutier", - "_key": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", + "name": "Simon Pelloutier", + "_docID": "bae-09d33399-197a-5b98-b135-4398f2b6de4c", }, "_group": []map[string]any{ { diff --git 
a/tests/integration/query/one_to_many_to_many/joins_test.go b/tests/integration/query/one_to_many_to_many/joins_test.go index 492bb10731..f883f9ae9f 100644 --- a/tests/integration/query/one_to_many_to_many/joins_test.go +++ b/tests/integration/query/one_to_many_to_many/joins_test.go @@ -21,13 +21,13 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { Description: "1-N-M Query to ensure joins are linked properly.", Request: `query { Author { - _key + _docID name book { - _key + _docID name publisher { - _key + _docID name } } @@ -140,59 +140,59 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { Results: []map[string]any{ { - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "book": []map[string]any{ { - "_key": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", + "_docID": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", "name": "The Associate", "publisher": []map[string]any{}, }, { - "_key": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", - "name": "Sooley", + "_docID": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", + "name": "Sooley", "publisher": []map[string]any{ { - "_key": "bae-cecb7841-fb4c-5403-a6d7-3654694dd073", - "name": "First of Two Publishers of Sooley", + "_docID": "bae-cecb7841-fb4c-5403-a6d7-3654694dd073", + "name": "First of Two Publishers of Sooley", }, { - "_key": "bae-d7e35ac3-dcf3-5537-91dd-3d27e378ba5d", - "name": "Second of Two Publishers of Sooley", + "_docID": "bae-d7e35ac3-dcf3-5537-91dd-3d27e378ba5d", + "name": "Second of Two Publishers of Sooley", }, }, }, { - "_key": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", - "name": "Theif Lord", + "_docID": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", + "name": "Theif Lord", "publisher": []map[string]any{ { - "_key": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", - "name": "Only Publisher of Theif Lord", + "_docID": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", + "name": "Only Publisher of Theif Lord", }, }, }, { - "_key": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", - "name": "Painted House", + "_docID": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", + "name": "Painted House", "publisher": []map[string]any{ { - "_key": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", - "name": "Only Publisher of Painted House", + "_docID": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", + "name": "Only Publisher of Painted House", }, }, }, { - "_key": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", - "name": "A Time for Mercy", + "_docID": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", + "name": "A Time for Mercy", "publisher": []map[string]any{ { - "_key": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", - "name": "Only Publisher of A Time for Mercy", + "_docID": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", + "name": "Only Publisher of A Time for Mercy", }, }, }, @@ -200,22 +200,22 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { }, { - "_key": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", - "book": []map[string]any{}, - "name": "Not a Writer", + "_docID": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", + "book": []map[string]any{}, + "name": "Not a Writer", }, { - "name": "Cornelia Funke", - "_key": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "name": "Cornelia Funke", + "_docID": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", "book": []map[string]any{ { - "_key": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", - "name": "The Rooster Bar", + "_docID": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", + "name": "The Rooster Bar", "publisher": 
[]map[string]any{ { - "_key": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", - "name": "Only Publisher of The Rooster Bar", + "_docID": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", + "name": "Only Publisher of The Rooster Bar", }, }, }, diff --git a/tests/integration/query/one_to_many_to_one/joins_test.go b/tests/integration/query/one_to_many_to_one/joins_test.go index e30b1b699a..57b76a15b9 100644 --- a/tests/integration/query/one_to_many_to_one/joins_test.go +++ b/tests/integration/query/one_to_many_to_one/joins_test.go @@ -153,13 +153,13 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { testUtils.Request{ Request: `query { Author { - _key + _docID name book { - _key + _docID name publisher { - _key + _docID name } } @@ -167,63 +167,63 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { }`, Results: []map[string]any{ { - "name": "John Grisham", - "_key": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", + "name": "John Grisham", + "_docID": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "book": []map[string]any{ { - "_key": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", + "_docID": "bae-4fb9e3e9-d1d3-5404-bf15-10e4c995d9ca", "name": "The Associate", "publisher": nil, }, { - "_key": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", - "name": "Sooley", + "_docID": "bae-7ba73251-c935-5f44-ac04-d2061149cc14", + "name": "Sooley", "publisher": map[string]any{ - "_key": "bae-cd2a319a-e013-559e-aad9-282b48fd3f72", - "name": "Only Publisher of Sooley", + "_docID": "bae-cd2a319a-e013-559e-aad9-282b48fd3f72", + "name": "Only Publisher of Sooley", }, }, { - "_key": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", - "name": "Theif Lord", + "_docID": "bae-b8091c4f-7594-5d7a-98e8-272aadcedfdf", + "name": "Theif Lord", "publisher": map[string]any{ - "_key": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", - "name": "Only Publisher of Theif Lord", + "_docID": "bae-1a3ca715-3f3c-5934-9133-d7b489d57f88", + "name": "Only Publisher of Theif Lord", }, }, { - "_key": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", - "name": "Painted House", + "_docID": "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d", + "name": "Painted House", "publisher": map[string]any{ - "_key": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", - "name": "Only Publisher of Painted House", + "_docID": "bae-6412f5ff-a69a-5472-8647-18bf2b247697", + "name": "Only Publisher of Painted House", }, }, { - "_key": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", - "name": "A Time for Mercy", + "_docID": "bae-c674e3b0-ebb6-5b89-bfa3-d1128288d21a", + "name": "A Time for Mercy", "publisher": map[string]any{ - "_key": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", - "name": "Only Publisher of A Time for Mercy", + "_docID": "bae-2f83fa75-241f-517d-9b47-3715feee43c1", + "name": "Only Publisher of A Time for Mercy", }, }, }, }, { - "_key": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", - "book": []map[string]any{}, - "name": "Not a Writer", + "_docID": "bae-7ba214a4-5ac8-5878-b221-dae6c285ef41", + "book": []map[string]any{}, + "name": "Not a Writer", }, { - "name": "Cornelia Funke", - "_key": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", + "name": "Cornelia Funke", + "_docID": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", "book": []map[string]any{ { - "_key": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", - "name": "The Rooster Bar", + "_docID": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", + "name": "The Rooster Bar", "publisher": map[string]any{ - "_key": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", - "name": "Only Publisher of The Rooster Bar", + "_docID": 
"bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", + "name": "Only Publisher of The Rooster Bar", }, }, }, diff --git a/tests/integration/query/simple/simple_test.go b/tests/integration/query/simple/simple_test.go index 6911b08ea8..abdc0cd1f3 100644 --- a/tests/integration/query/simple/simple_test.go +++ b/tests/integration/query/simple/simple_test.go @@ -21,7 +21,7 @@ func TestQuerySimple(t *testing.T) { Description: "Simple query with no filter", Request: `query { Users { - _key + _docID Name Age } @@ -36,9 +36,9 @@ func TestQuerySimple(t *testing.T) { }, Results: []map[string]any{ { - "_key": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", - "Name": "John", - "Age": int64(21), + "_docID": "bae-52b9170d-b77a-5887-b877-cbdbb99b009f", + "Name": "John", + "Age": int64(21), }, }, } diff --git a/tests/integration/query/simple/with_cid_dockey_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go similarity index 67% rename from tests/integration/query/simple/with_cid_dockey_test.go rename to tests/integration/query/simple/with_cid_doc_id_test.go index 7bd1eb4971..1fa00d05d1 100644 --- a/tests/integration/query/simple/with_cid_dockey_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -16,13 +16,13 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithInvalidCidAndInvalidDocKey(t *testing.T) { +func TestQuerySimpleWithInvalidCidAndInvalidDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with invalid cid and invalid dockey", + Description: "Simple query with invalid cid and invalid docID", Request: `query { Users ( cid: "any non-nil string value - this will be ignored", - dockey: "invalid docKey" + docID: "invalid docID" ) { Name } @@ -43,13 +43,13 @@ func TestQuerySimpleWithInvalidCidAndInvalidDocKey(t *testing.T) { // This test is for documentation reasons only. This is not // desired behaviour (should just return empty). 
-func TestQuerySimpleWithUnknownCidAndInvalidDocKey(t *testing.T) { +func TestQuerySimpleWithUnknownCidAndInvalidDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with unknown cid and invalid dockey", + Description: "Simple query with unknown cid and invalid docID", Request: `query { Users ( cid: "bafybeid57gpbwi4i6bg7g357vwwyzsmr4bjo22rmhoxrwqvdxlqxcgaqvu", - dockey: "invalid docKey" + docID: "invalid docID" ) { Name } @@ -68,13 +68,13 @@ func TestQuerySimpleWithUnknownCidAndInvalidDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with cid and dockey", + Description: "Simple query with cid and docID", Request: `query { Users ( - cid: "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name } @@ -97,13 +97,13 @@ func TestQuerySimpleWithCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndFirstCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (first) cid and dockey", + Description: "Simple query with (first) cid and docID", Request: `query { Users ( - cid: "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name Age @@ -138,13 +138,13 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndLastCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (last) cid and dockey", + Description: "Simple query with (last) cid and docID", Request: `query { Users ( - cid: "bafybeiav54zfepx5n2zcm2g34q5ur5w2dosb2ssxjckq3esy5dg6nftxse" - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeibnj6yitgmynodaxnvtl22rhzclhsrc5asmocwyccsbsamobibpsy", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name Age @@ -179,13 +179,13 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndMiddleCidAndDocKey(t *testing.T) { +func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (middle) cid and dockey", + Description: "Simple query with (middle) cid and docID", Request: `query { Users ( - cid: "bafybeicrati3sbl3esju7eus3dwi53aggd6thhtporh7vj5mv77vvs3mdy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + cid: "bafybeify36bauenmsov4rijdmency367boy234mjezpvg4dj6r47ay3jwq", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name Age @@ -220,21 +220,22 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocKey(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithUpdateAndFirstCidAndDocKeyAndSchemaVersion(t *testing.T) { +func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with (first) cid and dockey and yielded schema version", + 
Description: "Simple query with (first) cid and docID and yielded schema version", Request: `query { - Users ( - cid: "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", - dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" - ) { - Name - Age - _version { - schemaVersionId - } - } - }`, + Users ( + cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + ) { + Name + Age + _version { + schemaVersionId + } + } + }`, + Docs: map[int][]string{ 0: { `{ @@ -259,7 +260,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocKeyAndSchemaVersion(t *testing.T) "Age": int64(21), "_version": []map[string]any{ { - "schemaVersionId": "bafkreicqyapc7zxw5tt2ymybau5m54lhmm5ahrl22oaktnhidul757a4ba", + "schemaVersionId": "bafkreidvd63bawkelxe3wtf7a65klkq4x3dvenqafyasndyal6fvffkeam", }, }, }, diff --git a/tests/integration/query/simple/with_key_test.go b/tests/integration/query/simple/with_doc_id_filter_test.go similarity index 79% rename from tests/integration/query/simple/with_key_test.go rename to tests/integration/query/simple/with_doc_id_filter_test.go index f6854da643..5477665e1c 100644 --- a/tests/integration/query/simple/with_key_test.go +++ b/tests/integration/query/simple/with_doc_id_filter_test.go @@ -16,11 +16,11 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithKeyFilterBlock(t *testing.T) { +func TestQuerySimpleWithDocIDFilterBlock(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with basic filter (key by filter block)", + Description: "Simple query with basic filter (docID by filter block)", Request: `query { - Users(filter: {_key: {_eq: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"}}) { + Users(filter: {_docID: {_eq: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"}}) { Name Age } diff --git a/tests/integration/query/simple/with_dockey_test.go b/tests/integration/query/simple/with_doc_id_test.go similarity index 73% rename from tests/integration/query/simple/with_dockey_test.go rename to tests/integration/query/simple/with_doc_id_test.go index 5af4dac7ab..6067baea38 100644 --- a/tests/integration/query/simple/with_dockey_test.go +++ b/tests/integration/query/simple/with_doc_id_test.go @@ -16,12 +16,12 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithDocKeyFilter(t *testing.T) { +func TestQuerySimpleWithDocIDFilter(t *testing.T) { tests := []testUtils.RequestTestCase{ { - Description: "Simple query with basic filter (key by DocKey arg)", + Description: "Simple query with basic filter (by docID arg)", Request: `query { - Users(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { + Users(docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { Name Age } @@ -42,9 +42,9 @@ func TestQuerySimpleWithDocKeyFilter(t *testing.T) { }, }, { - Description: "Simple query with basic filter (key by DocKey arg), no results", + Description: "Simple query with basic filter (by docID arg), no results", Request: `query { - Users(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009g") { + Users(docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009g") { Name Age } @@ -60,9 +60,9 @@ func TestQuerySimpleWithDocKeyFilter(t *testing.T) { Results: []map[string]any{}, }, { - Description: "Simple query with basic filter (key by DocKey arg), partial results", + Description: "Simple query with basic filter (by docID arg), partial results", Request: `query { - Users(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { + Users(docID: 
"bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { Name Age } diff --git a/tests/integration/query/simple/with_dockeys_test.go b/tests/integration/query/simple/with_doc_ids_test.go similarity index 68% rename from tests/integration/query/simple/with_dockeys_test.go rename to tests/integration/query/simple/with_doc_ids_test.go index 8bbd0067da..c28fb5d075 100644 --- a/tests/integration/query/simple/with_dockeys_test.go +++ b/tests/integration/query/simple/with_doc_ids_test.go @@ -16,12 +16,12 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithDocKeysFilter(t *testing.T) { +func TestQuerySimpleWithDocIDsFilter(t *testing.T) { tests := []testUtils.RequestTestCase{ { - Description: "Simple query with basic filter (single key by DocKeys arg)", + Description: "Simple query with basic filter (single ID by docIDs arg)", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { Name Age } @@ -42,9 +42,9 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { }, }, { - Description: "Simple query with basic filter (single key by DocKeys arg), no results", + Description: "Simple query with basic filter (single ID by docIDs arg), no results", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009g"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009g"]) { Name Age } @@ -60,9 +60,9 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { Results: []map[string]any{}, }, { - Description: "Simple query with basic filter (duplicate key by DocKeys arg), partial results", + Description: "Simple query with basic filter (duplicate ID by docIDs arg), partial results", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-52b9170d-b77a-5887-b877-cbdbb99b009f"]) { Name Age } @@ -87,9 +87,9 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { }, }, { - Description: "Simple query with basic filter (multiple key by DocKeys arg), partial results", + Description: "Simple query with basic filter (multiple ID by docIDs arg), partial results", Request: `query { - Users(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-1378ab62-e064-5af4-9ea6-49941c8d8f94"]) { + Users(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-1378ab62-e064-5af4-9ea6-49941c8d8f94"]) { Name Age } @@ -128,11 +128,11 @@ func TestQuerySimpleWithDocKeysFilter(t *testing.T) { } } -func TestQuerySimpleReturnsNothinGivenEmptyDocKeysFilter(t *testing.T) { +func TestQuerySimpleReturnsNothinGivenEmptyDocIDsFilter(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with empty DocKeys arg", + Description: "Simple query with empty docIDs arg", Request: `query { - Users(dockeys: []) { + Users(docIDs: []) { Name Age } diff --git a/tests/integration/query/simple/with_group_dockey_test.go b/tests/integration/query/simple/with_group_doc_id_test.go similarity index 85% rename from tests/integration/query/simple/with_group_dockey_test.go rename to tests/integration/query/simple/with_group_doc_id_test.go index c40a27efc2..177934ebdc 100644 --- a/tests/integration/query/simple/with_group_dockey_test.go +++ b/tests/integration/query/simple/with_group_doc_id_test.go @@ -16,13 +16,13 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithGroupByWithGroupWithDocKey(t *testing.T) { 
+func TestQuerySimpleWithGroupByWithGroupWithDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with DocKey filter on _group", + Description: "Simple query with docID filter on _group", Request: `query { Users(groupBy: [Age]) { Age - _group(dockey: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { + _group(docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f") { Name } } diff --git a/tests/integration/query/simple/with_group_dockeys_test.go b/tests/integration/query/simple/with_group_doc_ids_test.go similarity index 83% rename from tests/integration/query/simple/with_group_dockeys_test.go rename to tests/integration/query/simple/with_group_doc_ids_test.go index 8d11607819..9db3bae934 100644 --- a/tests/integration/query/simple/with_group_dockeys_test.go +++ b/tests/integration/query/simple/with_group_doc_ids_test.go @@ -16,13 +16,13 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestQuerySimpleWithGroupByWithGroupWithDocKeys(t *testing.T) { +func TestQuerySimpleWithGroupByWithGroupWithDocIDs(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "Simple query with DocKeys filter on _group", + Description: "Simple query with docIDs filter on _group", Request: `query { Users(groupBy: [Age]) { Age - _group(dockeys: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-9b2e1434-9d61-5eb1-b3b9-82e8e40729a7"]) { + _group(docIDs: ["bae-52b9170d-b77a-5887-b877-cbdbb99b009f", "bae-9b2e1434-9d61-5eb1-b3b9-82e8e40729a7"]) { Name } } diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 868d3b54af..2aa571eff7 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -46,14 +46,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", + "cid": "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", "links": []map[string]any{ { - "cid": "bafybeibphw52n3t5nn2xn32sfdsf4hbll3iddsc6or2ebnnrmpz2cbovyy", + "cid": "bafybeigpazmunkmlf5p5jw6fl4supfslupgp2kksvqr7quvhfhsddfa44e", "name": "Age", }, { - "cid": "bafybeifgqmrklbyw3x35zzzao3d7baownrv3z4v7vzfbmk2r5omv5icgu4", + "cid": "bafybeibxsjz4krbv3jcbobpdm2igdcvunitu332o6ebsxup53wglkyn6ee", "name": "Name", }, }, @@ -90,7 +90,7 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { "Name": "John", "_version": []map[string]any{ { - "schemaVersionId": "bafkreicqyapc7zxw5tt2ymybau5m54lhmm5ahrl22oaktnhidul757a4ba", + "schemaVersionId": "bafkreidvd63bawkelxe3wtf7a65klkq4x3dvenqafyasndyal6fvffkeam", }, }, }, @@ -100,17 +100,17 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithSchemaVersionId(t *testing.T) { executeTestCase(t, test) } -func TestQuerySimpleWithEmbeddedLatestCommitWithDockey(t *testing.T) { - const dockey = "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" +func TestQuerySimpleWithEmbeddedLatestCommitWithDocID(t *testing.T) { + const docID = "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" test := testUtils.RequestTestCase{ - Description: "Embedded commits query within object query with dockey", + Description: "Embedded commits query within object query with document ID", Request: `query { Users { Name - _key + _docID _version { - dockey + docID } } }`, @@ -124,11 +124,11 @@ func TestQuerySimpleWithEmbeddedLatestCommitWithDockey(t *testing.T) { }, Results: []map[string]any{ { - "Name": "John", - 
"_key": dockey, + "Name": "John", + "_docID": docID, "_version": []map[string]any{ { - "dockey": dockey, + "docID": docID, }, }, }, @@ -171,14 +171,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeieybepwqpy5h2d4sywksgvdqpjd44ciu223vrm7knumychpmucawy", + "cid": "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", "L1": []map[string]any{ { - "cid": "bafybeibphw52n3t5nn2xn32sfdsf4hbll3iddsc6or2ebnnrmpz2cbovyy", + "cid": "bafybeigpazmunkmlf5p5jw6fl4supfslupgp2kksvqr7quvhfhsddfa44e", "name": "Age", }, { - "cid": "bafybeifgqmrklbyw3x35zzzao3d7baownrv3z4v7vzfbmk2r5omv5icgu4", + "cid": "bafybeibxsjz4krbv3jcbobpdm2igdcvunitu332o6ebsxup53wglkyn6ee", "name": "Name", }, }, diff --git a/tests/integration/schema/aggregates/inline_array_test.go b/tests/integration/schema/aggregates/inline_array_test.go index f5c6199e39..75c9d76414 100644 --- a/tests/integration/schema/aggregates/inline_array_test.go +++ b/tests/integration/schema/aggregates/inline_array_test.go @@ -405,7 +405,7 @@ var aggregateGroupArg = map[string]any{ }, }, map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "IDOperatorBlock", }, diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go index 96a3b98a56..97671738fb 100644 --- a/tests/integration/schema/default_fields.go +++ b/tests/integration/schema/default_fields.go @@ -10,7 +10,11 @@ package schema -import "sort" +import ( + "sort" + + "github.com/sourcenetwork/defradb/client/request" +) type Field = map[string]any type fields []Field @@ -76,7 +80,7 @@ var DefaultEmbeddedObjFields = concat( ) var keyField = Field{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "kind": "SCALAR", "name": "ID", @@ -138,15 +142,15 @@ var cidArg = Field{ "inputFields": nil, }, } -var dockeyArg = Field{ - "name": "dockey", +var docIDArg = Field{ + "name": request.DocIDArgName, "type": map[string]any{ "name": "String", "inputFields": nil, }, } -var dockeysArg = Field{ - "name": "dockeys", +var docIDsArg = Field{ + "name": request.DocIDsArgName, "type": map[string]any{ "name": nil, "inputFields": nil, @@ -201,7 +205,7 @@ type argDef struct { func buildOrderArg(objectName string, fields []argDef) Field { inputFields := []any{ - makeInputObject("_key", "Ordering", nil), + makeInputObject("_docID", "Ordering", nil), } for _, field := range fields { @@ -226,7 +230,7 @@ func buildFilterArg(objectName string, fields []argDef) Field { "kind": "INPUT_OBJECT", "name": filterArgName, }), - makeInputObject("_key", "IDOperatorBlock", nil), + makeInputObject("_docID", "IDOperatorBlock", nil), makeInputObject("_not", filterArgName, nil), makeInputObject("_or", nil, map[string]any{ "kind": "INPUT_OBJECT", diff --git a/tests/integration/schema/filter_test.go b/tests/integration/schema/filter_test.go index 17f38408bf..e3780a3653 100644 --- a/tests/integration/schema/filter_test.go +++ b/tests/integration/schema/filter_test.go @@ -76,7 +76,7 @@ func TestFilterForSimpleSchema(t *testing.T) { }, }, map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "IDOperatorBlock", "ofType": nil, @@ -132,8 +132,8 @@ var testFilterForSimpleSchemaArgProps = map[string]any{ var defaultUserArgsWithoutFilter = trimFields( fields{ cidArg, - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, showDeletedArg, groupByArg, limitArg, @@ -214,7 +214,7 @@ func TestFilterForOneToOneSchema(t *testing.T) { }, }, 
map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "IDOperatorBlock", "ofType": nil, @@ -284,8 +284,8 @@ var testFilterForOneToOneSchemaArgProps = map[string]any{ var defaultBookArgsWithoutFilter = trimFields( fields{ cidArg, - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, showDeletedArg, groupByArg, limitArg, diff --git a/tests/integration/schema/get_schema_test.go b/tests/integration/schema/get_schema_test.go index e6d5f166ac..ae63d49812 100644 --- a/tests/integration/schema/get_schema_test.go +++ b/tests/integration/schema/get_schema_test.go @@ -71,9 +71,9 @@ func TestGetSchema_GivenNoSchemaGivenUnknownName(t *testing.T) { } func TestGetSchema_ReturnsAllSchema(t *testing.T) { - usersSchemaVersion1ID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" - usersSchemaVersion2ID := "bafkreicseqwxooxo2wf2bgzdalwtm2rtsj7x4mgsir4rp4htmpnwnffwre" - booksSchemaVersion1ID := "bafkreigbfibfn7g6neen2gghc54dzocexefi7vshc3opgvy6j7jflar2nm" + usersSchemaVersion1ID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" + usersSchemaVersion2ID := "bafkreiabmj6ypcc6alqswrscgpj6rqbhogsojgv7fopr5rgrluvxtwente" + booksSchemaVersion1ID := "bafkreiaiku34mjr2za5yo6yc4pzoupenwzjq7d5pclgfdiihdnjq33fn5y" test := testUtils.TestCase{ Actions: []any{ @@ -97,25 +97,14 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { }, testUtils.GetSchema{ ExpectedResults: []client.SchemaDescription{ - { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, - Fields: []client.FieldDescription{ - { - Name: "_key", - Kind: client.FieldKind_DocKey, - }, - }, - }, { Name: "Users", Root: usersSchemaVersion1ID, VersionID: usersSchemaVersion2ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -132,8 +121,19 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { VersionID: booksSchemaVersion1ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + }, + }, + { + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, }, }, }, @@ -146,8 +146,8 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { - usersSchemaVersion1ID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" - usersSchemaVersion2ID := "bafkreicseqwxooxo2wf2bgzdalwtm2rtsj7x4mgsir4rp4htmpnwnffwre" + usersSchemaVersion1ID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" + usersSchemaVersion2ID := "bafkreiabmj6ypcc6alqswrscgpj6rqbhogsojgv7fopr5rgrluvxtwente" test := testUtils.TestCase{ Actions: []any{ @@ -172,25 +172,14 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { testUtils.GetSchema{ Root: immutable.Some(usersSchemaVersion1ID), ExpectedResults: []client.SchemaDescription{ - { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, - Fields: []client.FieldDescription{ - { - Name: "_key", - Kind: client.FieldKind_DocKey, - }, - }, - }, { Name: "Users", Root: usersSchemaVersion1ID, VersionID: usersSchemaVersion2ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -201,6 +190,17 @@ func 
TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { }, }, }, + { + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + }, + }, }, }, }, @@ -210,8 +210,8 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { } func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { - usersSchemaVersion1ID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" - usersSchemaVersion2ID := "bafkreicseqwxooxo2wf2bgzdalwtm2rtsj7x4mgsir4rp4htmpnwnffwre" + usersSchemaVersion1ID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" + usersSchemaVersion2ID := "bafkreiabmj6ypcc6alqswrscgpj6rqbhogsojgv7fopr5rgrluvxtwente" test := testUtils.TestCase{ Actions: []any{ @@ -236,25 +236,14 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { testUtils.GetSchema{ Name: immutable.Some("Users"), ExpectedResults: []client.SchemaDescription{ - { - Name: "Users", - Root: usersSchemaVersion1ID, - VersionID: usersSchemaVersion1ID, - Fields: []client.FieldDescription{ - { - Name: "_key", - Kind: client.FieldKind_DocKey, - }, - }, - }, { Name: "Users", Root: usersSchemaVersion1ID, VersionID: usersSchemaVersion2ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -265,6 +254,17 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { }, }, }, + { + Name: "Users", + Root: usersSchemaVersion1ID, + VersionID: usersSchemaVersion1ID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + }, + }, }, }, }, diff --git a/tests/integration/schema/group_test.go b/tests/integration/schema/group_test.go index 35a5171c73..5ac89b95ec 100644 --- a/tests/integration/schema/group_test.go +++ b/tests/integration/schema/group_test.go @@ -62,7 +62,7 @@ func TestGroupByFieldForTheManySideInSchema(t *testing.T) { // Internal fields. map[string]any{"name": "_deleted"}, map[string]any{"name": "_group"}, - map[string]any{"name": "_key"}, + map[string]any{"name": "_docID"}, map[string]any{"name": "_version"}, // User defined schema fields> @@ -124,7 +124,7 @@ func TestGroupByFieldForTheSingleSideInSchema(t *testing.T) { // Internal fields. 
map[string]any{"name": "_deleted"}, map[string]any{"name": "_group"}, - map[string]any{"name": "_key"}, + map[string]any{"name": "_docID"}, map[string]any{"name": "_version"}, // User defined schema fields> diff --git a/tests/integration/schema/input_type_test.go b/tests/integration/schema/input_type_test.go index e50920dc3b..c9798ffc9f 100644 --- a/tests/integration/schema/input_type_test.go +++ b/tests/integration/schema/input_type_test.go @@ -77,8 +77,8 @@ func TestInputTypeOfOrderFieldWhereSchemaHasManyRelationType(t *testing.T) { "args": append( trimFields( fields{ - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, buildFilterArg("group", []argDef{ { fieldName: "members", @@ -98,7 +98,7 @@ func TestInputTypeOfOrderFieldWhereSchemaHasManyRelationType(t *testing.T) { "ofType": nil, "inputFields": []any{ map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "Ordering", "ofType": nil, @@ -187,7 +187,7 @@ func TestInputTypeOfOrderFieldWhereSchemaHasRelationType(t *testing.T) { "ofType": nil, "inputFields": []any{ map[string]any{ - "name": "_key", + "name": "_docID", "type": map[string]any{ "name": "Ordering", "ofType": nil, @@ -258,8 +258,8 @@ var testInputTypeOfOrderFieldWhereSchemaHasRelationTypeArgProps = map[string]any var defaultGroupArgsWithoutOrder = trimFields( fields{ - dockeyArg, - dockeysArg, + docIDArg, + docIDsArg, buildFilterArg("author", []argDef{ { fieldName: "age", diff --git a/tests/integration/schema/migrations/query/simple_test.go b/tests/integration/schema/migrations/query/simple_test.go index a13fd32be9..b758356cac 100644 --- a/tests/integration/schema/migrations/query/simple_test.go +++ b/tests/integration/schema/migrations/query/simple_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQuery(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -115,8 +115,8 @@ func TestSchemaMigrationQueryMultipleDocs(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -178,8 +178,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredBeforeSchemaPatch(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -254,8 +254,8 @@ func TestSchemaMigrationQueryMigratesToIntermediaryVersion(t *testing.T) { // Register a migration from schema version 1 to schema version 2 
**only** - // there should be no migration from version 2 to version 3. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -325,7 +325,7 @@ func TestSchemaMigrationQueryMigratesFromIntermediaryVersion(t *testing.T) { // Register a migration from schema version 2 to schema version 3 **only** - // there should be no migration from version 1 to version 2. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", DestinationSchemaVersionID: "bafkreigrpkox3omi3c3sp5zoupcjg2b32mysztjozaqsceafsdtkadzufe", Lens: model.Lens{ Lenses: []model.LensModule{ @@ -394,8 +394,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -411,7 +411,7 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", DestinationSchemaVersionID: "bafkreigrpkox3omi3c3sp5zoupcjg2b32mysztjozaqsceafsdtkadzufe", Lens: model.Lens{ Lenses: []model.LensModule{ @@ -539,8 +539,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingScalarField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -600,8 +600,8 @@ func TestSchemaMigrationQueryMigrationMutatesExistingInlineArrayField(t *testing }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreidvca2kcxlxab2wi25xhiyxmug66css4cqzqqxu4rdyuanl7u5rde", - DestinationSchemaVersionID: "bafkreiedmg3qox3a24rkhkx3wahahpyixlxkicetsk3ctkh3f7xcbdrrli", + SourceSchemaVersionID: "bafkreigjtl5r3lq6dkbod766let7ewqirc2ai6l2c5j5fxxc43zmvqqs24", + DestinationSchemaVersionID: "bafkreicwipnhoplttqy7spj2ksgk7vwmxmdtwt6g23os2kmqgvb22wfg3m", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -663,8 +663,8 @@ func TestSchemaMigrationQueryMigrationRemovesExistingField(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - 
DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -724,8 +724,8 @@ func TestSchemaMigrationQueryMigrationPreservesExistingFieldWhenFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -798,8 +798,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcFieldNotRequeste }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -860,8 +860,8 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcAndDstFieldNotRe }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiggbvwwiqmzid4qnklwwdyu7mwhbbjy3ejss3x7uw7zxw6ivmmj6u", - DestinationSchemaVersionID: "bafkreiat3mfdsoknsavvw3wbir4atbaswqbnnitn3ysswqih2g4zwbn62a", + SourceSchemaVersionID: "bafkreicnoqat3exmvikr36xu3hhrkvay3d3cif24tezgsyvrydpobk2nqm", + DestinationSchemaVersionID: "bafkreigamaevrkcknutb275x3uxpgc2sn73qsfvkjqli7fiqaxfnniunjy", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_dockey_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go similarity index 81% rename from tests/integration/schema/migrations/query/with_dockey_test.go rename to tests/integration/schema/migrations/query/with_doc_id_test.go index a3a983d8bb..2ce1fd8ac3 100644 --- a/tests/integration/schema/migrations/query/with_dockey_test.go +++ b/tests/integration/schema/migrations/query/with_doc_id_test.go @@ -21,9 +21,9 @@ import ( ) // This test asserts that spans are being passed correctly through the new Lens fetcher. 
-func TestSchemaMigrationQueryByDocKey(t *testing.T) { +func TestSchemaMigrationQueryByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema migration, query by key", + Description: "Test schema migration, query by docID", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -52,8 +52,8 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -69,7 +69,7 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { + Users (docID: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { name verified } @@ -88,7 +88,7 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { } // This test asserts that lenses are being correctly returned to the pool for reuse after -// fetch completion. Querying by dockey should mean that the fetcher only scans the dockey +// fetch completion. Querying by docID should mean that the fetcher only scans the docID // prefix, and thus will only migrate a single document per query (unlike filters etc which // will migrate all documents at the time of writing). If the return mechanic was very faulty // then this test *should* deadlock. @@ -99,9 +99,9 @@ func TestSchemaMigrationQueryByDocKey(t *testing.T) { // // At the time of writing, the lens pool size is hardcoded to 5, so we should test with 6 // documents/queries, if the size changes so should this test. 
-func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { +func TestSchemaMigrationQueryMultipleQueriesByDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema migration, multiple queries by key", + Description: "Test schema migration, multiple queries by docID", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -158,8 +158,8 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -175,7 +175,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { + Users (docID: "bae-d7546ac1-c133-5853-b866-9b9f926fe7e5") { name verified } @@ -189,7 +189,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-92393ad0-07b6-5753-8dbb-19c9c41374ed") { + Users (docID: "bae-92393ad0-07b6-5753-8dbb-19c9c41374ed") { name verified } @@ -203,7 +203,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-403d7337-f73e-5c81-8719-e853938c8985") { + Users (docID: "bae-403d7337-f73e-5c81-8719-e853938c8985") { name verified } @@ -217,7 +217,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { + Users (docID: "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad") { name verified } @@ -231,7 +231,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-3f1174ba-d9bc-5a6a-b0bc-8f19581f199d") { + Users (docID: "bae-3f1174ba-d9bc-5a6a-b0bc-8f19581f199d") { name verified } @@ -245,7 +245,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocKey(t *testing.T) { }, testUtils.Request{ Request: `query { - Users (dockey: "bae-0698bda7-2c69-5028-a26a-0a1c491b793b") { + Users (docID: "bae-0698bda7-2c69-5028-a26a-0a1c491b793b") { name verified } diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index 0fc5d2da79..4b06bf6586 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -46,8 +46,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu", - DestinationSchemaVersionID: "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a", + SourceSchemaVersionID: "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du", + DestinationSchemaVersionID: "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -136,8 +136,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing testUtils.ConfigureMigration{ // Register the migration on both nodes. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu", - DestinationSchemaVersionID: "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a", + SourceSchemaVersionID: "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du", + DestinationSchemaVersionID: "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -238,7 +238,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSch // Register a migration from version 2 to version 3 on both nodes. // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", DestinationSchemaVersionID: "bafkreigrpkox3omi3c3sp5zoupcjg2b32mysztjozaqsceafsdtkadzufe", Lens: model.Lens{ Lenses: []model.LensModule{ diff --git a/tests/integration/schema/migrations/query/with_restart_test.go b/tests/integration/schema/migrations/query/with_restart_test.go index deac59c725..3b51c92ada 100644 --- a/tests/integration/schema/migrations/query/with_restart_test.go +++ b/tests/integration/schema/migrations/query/with_restart_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithRestart(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_set_default_test.go b/tests/integration/schema/migrations/query/with_set_default_test.go index 9d61d609e7..55a3fc9968 100644 --- a/tests/integration/schema/migrations/query/with_set_default_test.go +++ b/tests/integration/schema/migrations/query/with_set_default_test.go @@ -22,7 +22,7 @@ import ( ) func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t *testing.T) { - schemaVersionID2 := "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a" + schemaVersionID2 := "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -50,7 +50,7 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu", + SourceSchemaVersionID: "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du", 
DestinationSchemaVersionID: schemaVersionID2, Lens: model.Lens{ Lenses: []model.LensModule{ @@ -89,8 +89,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * } func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t *testing.T) { - schemaVersionID1 := "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu" - schemaVersionID2 := "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a" + schemaVersionID1 := "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du" + schemaVersionID2 := "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm" test := testUtils.TestCase{ Description: "Test schema migration", @@ -164,8 +164,8 @@ func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t } func TestSchemaMigrationQuery_WithSetDefaultToOriginalVersionThatDocWasCreatedAt_ClearsMigrations(t *testing.T) { - schemaVersionID1 := "bafkreiguj3z5egyieou3li6aeyhabgtpk4mtl6nr7jjmyoihc6dmdg6vbu" - schemaVersionID2 := "bafkreidj4ipbeqhqn7at7du4vhzk3aw4xswbwccwqhbcab6avlgdeu6w2a" + schemaVersionID1 := "bafkreibgg4ex7aya4w4x3dnrlyov4juyuffjjokzkjrpoupncfuvsyi6du" + schemaVersionID2 := "bafkreidvp3xozpau2zanh7s5or4fhr7kchm6klznsyzd7fpcm3sh2xlgfm" test := testUtils.TestCase{ Description: "Test schema migration", diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index fcd01d6748..4bb0395365 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -47,8 +47,8 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -109,8 +109,8 @@ func TestSchemaMigrationQueryWithTxnAndCommit(t *testing.T) { testUtils.ConfigureMigration{ TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index 478ffd8e24..9fbf2b914a 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -45,8 +45,8 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: 
"bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -129,8 +129,8 @@ func TestSchemaMigrationQueryWithMigrationRegisteredAfterUpdate(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index c1982f5325..29769f1bac 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -91,8 +91,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -124,8 +124,8 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, }, { - SourceSchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", - DestinationSchemaVersionID: "bafkreiaa3njstjciqclhh4dzv2xaw32tfxxbrbembdvwqfmuuqai3ghu7a", + SourceSchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", + DestinationSchemaVersionID: "bafkreia4m6sn2rfypj2velvwpyude22fcb5jyfzum2eh3cdzg4a3myj5nu", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index 9fa0eb021a..ed8e05abf7 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -20,7 +20,7 @@ import ( ) func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { - schemaVersionID := "bafkreickgf3nbjaairxkkqawmrv7fafaafyccl4qygqeveagisdn42eohu" + schemaVersionID := "bafkreicavrlknsnfqey6nfwthyiguvv4dqcwhvywl5j6socx3vvjt4zqte" test := testUtils.TestCase{ Actions: []any{ @@ -52,8 +52,8 @@ func TestSchemaSimpleCreatesSchemaGivenEmptyType(t *testing.T) { Root: schemaVersionID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, }, }, }, diff --git a/tests/integration/schema/updates/add/field/create_test.go b/tests/integration/schema/updates/add/field/create_test.go index d59df6c294..af771d025c 100644 --- a/tests/integration/schema/updates/add/field/create_test.go +++ b/tests/integration/schema/updates/add/field/create_test.go @@ -43,16 +43,16 @@ func TestSchemaUpdatesAddFieldWithCreate(t *testing.T) { testUtils.Request{ Request: `query { Users { - _key + _docID name email } }`, Results: []map[string]any{ { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", - "name": "John", - "email": nil, + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "name": "John", + "email": nil, }, }, }, @@ -98,21 +98,21 @@ func TestSchemaUpdatesAddFieldWithCreateAfterSchemaUpdate(t 
*testing.T) { testUtils.Request{ Request: `query { Users { - _key + _docID name email } }`, Results: []map[string]any{ { - "_key": "bae-1ff978e7-b6ab-5ca7-8344-7fdcff65f94e", - "name": "Shahzad", - "email": "sqlizded@yahoo.ca", + "_docID": "bae-1ff978e7-b6ab-5ca7-8344-7fdcff65f94e", + "name": "Shahzad", + "email": "sqlizded@yahoo.ca", }, { - "_key": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", - "name": "John", - "email": nil, + "_docID": "bae-decf6467-4c7c-50d7-b09d-0a7097ef6bad", + "name": "John", + "email": nil, }, }, }, diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index 7cf8af8480..12cf973d59 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -17,8 +17,8 @@ import ( ) func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoin(t *testing.T) { - initialSchemaVersionId := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - updatedSchemaVersionId := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + initialSchemaVersionId := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + updatedSchemaVersionId := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, version join", @@ -105,8 +105,8 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi } func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndCommitQuery(t *testing.T) { - initialSchemaVersionId := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - updatedSchemaVersionId := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + initialSchemaVersionId := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + updatedSchemaVersionId := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, commits query", diff --git a/tests/integration/schema/updates/add/field/kind/dockey_test.go b/tests/integration/schema/updates/add/field/kind/doc_id_test.go similarity index 83% rename from tests/integration/schema/updates/add/field/kind/dockey_test.go rename to tests/integration/schema/updates/add/field/kind/doc_id_test.go index 6d8aca4736..edac43150f 100644 --- a/tests/integration/schema/updates/add/field/kind/dockey_test.go +++ b/tests/integration/schema/updates/add/field/kind/doc_id_test.go @@ -16,9 +16,9 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) -func TestSchemaUpdatesAddFieldKindDocKey(t *testing.T) { +func TestSchemaUpdatesAddFieldKindDocID(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind DocKey (1)", + Description: "Test schema update, add field with kind DocID (1)", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -48,9 +48,9 @@ func TestSchemaUpdatesAddFieldKindDocKey(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindDocKeyWithCreate(t *testing.T) { +func TestSchemaUpdatesAddFieldKindDocIDWithCreate(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind DocKey (1) and create", + Description: "Test schema update, add field with kind DocID (1) and create", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -92,9 +92,9 @@ func 
TestSchemaUpdatesAddFieldKindDocKeyWithCreate(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestSchemaUpdatesAddFieldKindDocKeySubstitutionWithCreate(t *testing.T) { +func TestSchemaUpdatesAddFieldKindDocIDSubstitutionWithCreate(t *testing.T) { test := testUtils.TestCase{ - Description: "Test schema update, add field with kind DocKey substitution and create", + Description: "Test schema update, add field with kind DocID substitution and create", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index a3dc12fb13..fb14d6ef30 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -470,12 +470,12 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { testUtils.Request{ Request: `mutation { create_Users(data: "{\"name\": \"John\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": key1, + "_docID": key1, }, }, }, diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index 21afdec279..abaa1d4564 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -470,12 +470,12 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { testUtils.Request{ Request: `mutation { create_Users(data: "{\"name\": \"John\"}") { - _key + _docID } }`, Results: []map[string]any{ { - "_key": key1, + "_docID": key1, }, }, }, diff --git a/tests/integration/schema/updates/add/field/simple_test.go b/tests/integration/schema/updates/add/field/simple_test.go index 69ddfd1734..04bafb2694 100644 --- a/tests/integration/schema/updates/add/field/simple_test.go +++ b/tests/integration/schema/updates/add/field/simple_test.go @@ -20,8 +20,8 @@ import ( ) func TestSchemaUpdatesAddFieldSimple(t *testing.T) { - schemaVersion1ID := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - schemaVersion2ID := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + schemaVersion1ID := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + schemaVersion2ID := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -58,8 +58,8 @@ func TestSchemaUpdatesAddFieldSimple(t *testing.T) { Root: schemaVersion1ID, Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { @@ -117,8 +117,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_Errors(t *testing.T) { } func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testing.T) { - schemaVersion1ID := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" - schemaVersion2ID := "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m" + schemaVersion1ID := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" + schemaVersion2ID := "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -149,8 +149,8 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testi Root: schemaVersion1ID, 
Fields: []client.FieldDescription{ { - Name: "_key", - Kind: client.FieldKind_DocKey, + Name: "_docID", + Kind: client.FieldKind_DocID, Typ: client.LWW_REGISTER, }, { diff --git a/tests/integration/schema/updates/move/simple_test.go b/tests/integration/schema/updates/move/simple_test.go index e6d8bb1556..e16226c1cf 100644 --- a/tests/integration/schema/updates/move/simple_test.go +++ b/tests/integration/schema/updates/move/simple_test.go @@ -17,7 +17,7 @@ import ( ) func TestSchemaUpdatesMoveCollectionDoesNothing(t *testing.T) { - schemaVersionID := "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq" + schemaVersionID := "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a" test := testUtils.TestCase{ Description: "Test schema update, move collection", diff --git a/tests/integration/schema/with_update_set_default_test.go b/tests/integration/schema/with_update_set_default_test.go index 55242d7a2a..602e6d48d6 100644 --- a/tests/integration/schema/with_update_set_default_test.go +++ b/tests/integration/schema/with_update_set_default_test.go @@ -92,7 +92,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToOriginal_NewFieldIsNotQueriable( SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetDefaultSchemaVersion{ - SchemaVersionID: "bafkreih27vuxrj4j2tmxnibfm77wswa36xji74hwhq7deipj5rvh3qyabq", + SchemaVersionID: "bafkreig3zt63qt7bkji47etyu2sqtzroa3tcfdxgwqc3ka2ijy63refq3a", }, testUtils.Request{ Request: `query { @@ -129,7 +129,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToNew_AllowsQueryingOfNewField(t * SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetDefaultSchemaVersion{ - SchemaVersionID: "bafkreid5bpw7sipm63l5gxxjrs34yrq2ur5xrzyseez5rnj3pvnvkaya6m", + SchemaVersionID: "bafkreiclwd4nrvczrzy7aj52olojyzvgm4ht6jpktwpxuqej5wk3ocxpqi", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/subscription/subscription_test.go b/tests/integration/subscription/subscription_test.go index 7d51a240ad..49f8bf1f55 100644 --- a/tests/integration/subscription/subscription_test.go +++ b/tests/integration/subscription/subscription_test.go @@ -23,21 +23,21 @@ func TestSubscriptionWithCreateMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), - "name": "John", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": int64(27), + "name": "John", }, { - "_key": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", - "age": int64(31), - "name": "Addo", + "_docID": "bae-18def051-7f0f-5dc9-8a69-2a5e423f6b55", + "age": int64(31), + "name": "Addo", }, }, }, @@ -78,16 +78,16 @@ func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User(filter: {age: {_lt: 30}}) { - _key + _docID name age } }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), - "name": "John", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": int64(27), + "name": "John", }, }, }, @@ -116,7 +116,7 @@ func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User(filter: {age: {_gt: 30}}) { - _key + _docID name age } @@ -148,16 +148,16 @@ func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User(filter: {age: {_lt: 30}}) { - _key + _docID name age } 
}`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", - "age": int64(27), - "name": "John", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "age": int64(27), + "name": "John", }, }, }, @@ -216,7 +216,7 @@ func TestSubscriptionWithUpdateMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User { - _key + _docID name age points @@ -224,7 +224,7 @@ func TestSubscriptionWithUpdateMutations(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", "age": int64(27), "name": "John", "points": float64(45), @@ -274,7 +274,7 @@ func TestSubscriptionWithUpdateAllMutations(t *testing.T) { testUtils.SubscriptionRequest{ Request: `subscription { User { - _key + _docID name age points @@ -282,13 +282,13 @@ func TestSubscriptionWithUpdateAllMutations(t *testing.T) { }`, Results: []map[string]any{ { - "_key": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", + "_docID": "bae-0a24cf29-b2c2-5861-9d00-abd6250c475d", "age": int64(27), "name": "John", "points": float64(55), }, { - "_key": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", + "_docID": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", "age": int64(31), "name": "Addo", "points": float64(55), diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 2ae73ddeca..ccfeba4d7a 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -829,7 +829,7 @@ func refreshDocuments( // Just use the collection from the first relevant node, as all will be the same for this // purpose. collection := getNodeCollections(action.NodeID, s.collections)[0][action.CollectionID] - if err := doc.RemapAliasFieldsAndDockey(collection.Schema().Fields); err != nil { + if err := doc.RemapAliasFieldsAndDocID(collection.Schema().Fields); err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway continue @@ -837,7 +837,7 @@ func refreshDocuments( // The document may have been mutated by other actions, so to be sure we have the latest // version without having to worry about the individual update mechanics we fetch it. 
- doc, err = collection.Get(s.ctx, doc.Key(), false) + doc, err = collection.Get(s.ctx, doc.ID(), false) if err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway @@ -1155,7 +1155,7 @@ func createDocViaGQL( request := fmt.Sprintf( `mutation { create_%s(data: %s) { - _key + _docID } }`, collection.Name(), @@ -1174,11 +1174,11 @@ func createDocViaGQL( return nil, nil } - docKeyString := resultantDocs[0]["_key"].(string) - docKey, err := client.NewDocKeyFromString(docKeyString) + docIDString := resultantDocs[0]["_docID"].(string) + docID, err := client.NewDocIDFromString(docIDString) require.NoError(s.t, err) - doc, err := collection.Get(s.ctx, docKey, false) + doc, err := collection.Get(s.ctx, docID, false) require.NoError(s.t, err) return doc, nil @@ -1199,7 +1199,7 @@ func deleteDoc( actionNodes, nodeID, func() error { - _, err := collections[action.CollectionID].DeleteWithKey(s.ctx, doc.Key()) + _, err := collections[action.CollectionID].DeleteWithDocID(s.ctx, doc.ID()) return err }, ) @@ -1287,12 +1287,12 @@ func updateDocViaGQL( request := fmt.Sprintf( `mutation { - update_%s(id: "%s", data: %s) { - _key + update_%s(docID: "%s", data: %s) { + _docID } }`, collection.Name(), - doc.Key().String(), + doc.ID().String(), escapedJson, ) diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go index 4adce90805..9dc6da0dd6 100644 --- a/tests/predefined/gen_predefined.go +++ b/tests/predefined/gen_predefined.go @@ -110,7 +110,7 @@ type docGenerator struct { func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) map[string]any { result := make(map[string]any) for _, field := range typeDef.Schema.Fields { - if field.IsRelation() || field.Name == request.KeyFieldName { + if field.IsRelation() || field.Name == request.DocIDFieldName { continue } result[field.Name] = doc[field.Name] @@ -123,7 +123,7 @@ func toRequestedDoc(doc map[string]any, typeDef *client.CollectionDefinition) ma return result } -// generatePrimary generates primary docs for the given secondary doc and adds foreign keys +// generatePrimary generates primary docs for the given secondary doc and adds foreign docID // to the secondary doc to reference the primary docs. func (this *docGenerator) generatePrimary( secDocMap map[string]any, @@ -145,13 +145,13 @@ func (this *docGenerator) generatePrimary( if err != nil { return nil, nil, NewErrFailedToGenerateDoc(err) } - docKey := primDoc.Key().String() - requestedSecondary[secDocField.Name+request.RelatedObjectID] = docKey + docID := primDoc.ID().String() + requestedSecondary[secDocField.Name+request.RelatedObjectID] = docID subResult = append(subResult, gen.GeneratedDoc{Col: &primType, Doc: primDoc}) result = append(result, subResult...) secondaryDocs, err := this.generateSecondaryDocs( - secDocMapField.(map[string]any), docKey, &primType, secType.Description.Name) + secDocMapField.(map[string]any), docID, &primType, secType.Description.Name) if err != nil { return nil, nil, err } @@ -164,12 +164,12 @@ func (this *docGenerator) generatePrimary( } // generateRelatedDocs generates related docs (primary and secondary) for the given doc and -// adds foreign keys to the given doc to reference the primary docs. +// adds foreign docID to the given doc to reference the primary docs. 
func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName string) ([]gen.GeneratedDoc, error) { typeDef := this.types[typeName] // create first primary docs and link them to the given doc so that we can define - // dockey for the complete document. + // docID for the complete document. requested, result, err := this.generatePrimary(docMap, &typeDef) if err != nil { return nil, err @@ -181,7 +181,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st result = append(result, gen.GeneratedDoc{Col: &typeDef, Doc: doc}) - secondaryDocs, err := this.generateSecondaryDocs(docMap, doc.Key().String(), &typeDef, "") + secondaryDocs, err := this.generateSecondaryDocs(docMap, doc.ID().String(), &typeDef, "") if err != nil { return nil, err } @@ -191,7 +191,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st func (this *docGenerator) generateSecondaryDocs( primaryDocMap map[string]any, - docKey string, + docID string, primaryType *client.CollectionDefinition, parentTypeName string, ) ([]gen.GeneratedDoc, error) { @@ -202,7 +202,7 @@ func (this *docGenerator) generateSecondaryDocs( if !field.IsPrimaryRelation() && (parentTypeName == "" || parentTypeName != field.Schema) { docs, err := this.generateSecondaryDocsForField( - primaryDocMap, primaryType.Description.Name, &field, docKey) + primaryDocMap, primaryType.Description.Name, &field, docID) if err != nil { return nil, err } @@ -219,7 +219,7 @@ func (this *docGenerator) generateSecondaryDocsForField( primaryDoc map[string]any, primaryTypeName string, relField *client.FieldDescription, - primaryDocKey string, + primaryDocID string, ) ([]gen.GeneratedDoc, error) { result := []gen.GeneratedDoc{} relTypeDef := this.types[relField.Schema] @@ -230,7 +230,7 @@ func (this *docGenerator) generateSecondaryDocsForField( switch relVal := primaryDoc[relField.Name].(type) { case []map[string]any: for _, relDoc := range relVal { - relDoc[primaryPropName] = primaryDocKey + relDoc[primaryPropName] = primaryDocID actions, err := this.generateRelatedDocs(relDoc, relTypeDef.Description.Name) if err != nil { return nil, err @@ -238,7 +238,7 @@ func (this *docGenerator) generateSecondaryDocsForField( result = append(result, actions...) 
} case map[string]any: - relVal[primaryPropName] = primaryDocKey + relVal[primaryPropName] = primaryDocID actions, err := this.generateRelatedDocs(relVal, relTypeDef.Description.Name) if err != nil { return nil, err diff --git a/tests/predefined/gen_predefined_test.go b/tests/predefined/gen_predefined_test.go index 1092280015..b63617690d 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -36,7 +36,7 @@ func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { docs, err := CreateFromSDL(schema, docsList) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs(docsList.Docs), docs) + errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -57,7 +57,7 @@ func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, }), docs) @@ -96,11 +96,11 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": mustGetDocKeyFromDocMap(map[string]any{"name": "John"})}, - {"model": "MacBook", "owner_id": mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"})}, + {"model": "iPhone", "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"})}, + {"model": "MacBook", "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"})}, }), docs) if errorMsg != "" { t.Error(errorMsg) @@ -137,9 +137,9 @@ func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ - {"name": "John", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "iPhone"})}, - {"name": "Fred", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "MacBook"})}, + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ + {"name": "John", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"})}, + {"name": "Fred", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"})}, {"model": "iPhone"}, {"model": "MacBook"}, }), docs) @@ -180,9 +180,9 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS"}) - deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone", "specs_id": specsDoc[request.KeyFieldName]}) - userDoc := mustAddKeyToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.KeyFieldName]}) + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}) + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone", "specs_id": specsDoc[request.DocIDFieldName]}) + userDoc := mustAddDocIDToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.DocIDFieldName]}) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -222,12 +222,12 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { }) assert.NoError(t, err) - specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS"}) - userDoc := mustAddKeyToDoc(map[string]any{"name": "John"}) - deviceDoc := mustAddKeyToDoc(map[string]any{ + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}) + userDoc := 
mustAddDocIDToDoc(map[string]any{"name": "John"}) + deviceDoc := mustAddDocIDToDoc(map[string]any{ "model": "iPhone", - "specs_id": specsDoc[request.KeyFieldName], - "owner_id": userDoc[request.KeyFieldName], + "specs_id": specsDoc[request.DocIDFieldName], + "owner_id": userDoc[request.DocIDFieldName], }) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) @@ -268,9 +268,9 @@ func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { }) assert.NoError(t, err) - deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone"}) - specsDoc := mustAddKeyToDoc(map[string]any{"OS": "iOS", "device_id": deviceDoc[request.KeyFieldName]}) - userDoc := mustAddKeyToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.KeyFieldName]}) + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}) + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS", "device_id": deviceDoc[request.DocIDFieldName]}) + userDoc := mustAddDocIDToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.DocIDFieldName]}) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -310,12 +310,12 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { }) assert.NoError(t, err) - deviceDoc := mustAddKeyToDoc(map[string]any{"model": "iPhone"}) - addressDoc := mustAddKeyToDoc(map[string]any{"street": "Backer"}) - userDoc := mustAddKeyToDoc(map[string]any{ + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}) + addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}) + userDoc := mustAddDocIDToDoc(map[string]any{ "name": "John", - "device_id": deviceDoc[request.KeyFieldName], - "address_id": addressDoc[request.KeyFieldName], + "device_id": deviceDoc[request.DocIDFieldName], + "address_id": addressDoc[request.DocIDFieldName], }) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, addressDoc}, docs) @@ -356,15 +356,15 @@ func TestGeneratePredefinedFromSchema_OneToMany(t *testing.T) { }) assert.NoError(t, err) - johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) - fredDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"}) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) + fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": johnDocKey}, - {"model": "PlayStation", "owner_id": johnDocKey}, - {"model": "Surface", "owner_id": fredDocKey}, - {"model": "Pixel", "owner_id": fredDocKey}, + {"model": "iPhone", "owner_id": johnDocID}, + {"model": "PlayStation", "owner_id": johnDocID}, + {"model": "Surface", "owner_id": fredDocID}, + {"model": "Pixel", "owner_id": fredDocID}, }), docs) if errorMsg != "" { t.Error(errorMsg) @@ -411,13 +411,13 @@ func TestGeneratePredefinedFromSchema_OneToManyToOne(t *testing.T) { }) assert.NoError(t, err) - johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, - {"model": "iPhone", "owner_id": johnDocKey}, - {"model": "MacBook", "owner_id": johnDocKey}, - {"CPU": "A13", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "iPhone", "owner_id": johnDocKey})}, - {"CPU": 
"M2", "device_id": mustGetDocKeyFromDocMap(map[string]any{"model": "MacBook", "owner_id": johnDocKey})}, + {"model": "iPhone", "owner_id": johnDocID}, + {"model": "MacBook", "owner_id": johnDocID}, + {"CPU": "A13", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone", "owner_id": johnDocID})}, + {"CPU": "M2", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook", "owner_id": johnDocID})}, }), docs) if errorMsg != "" { t.Error(errorMsg) @@ -492,15 +492,15 @@ func TestGeneratePredefined_OneToMany(t *testing.T) { }) assert.NoError(t, err) - johnDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "John"}) - fredDocKey := mustGetDocKeyFromDocMap(map[string]any{"name": "Fred"}) - errorMsg := assertDocs(mustAddKeysToDocs([]map[string]any{ + johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) + fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": johnDocKey}, - {"model": "PlayStation", "owner_id": johnDocKey}, - {"model": "Surface", "owner_id": fredDocKey}, - {"model": "Pixel", "owner_id": fredDocKey}, + {"model": "iPhone", "owner_id": johnDocID}, + {"model": "PlayStation", "owner_id": johnDocID}, + {"model": "Surface", "owner_id": fredDocID}, + {"model": "Pixel", "owner_id": fredDocID}, }), docs) if errorMsg != "" { t.Error(errorMsg) diff --git a/tests/predefined/util_test.go b/tests/predefined/util_test.go index da5d880ba8..c06e6c0fdc 100644 --- a/tests/predefined/util_test.go +++ b/tests/predefined/util_test.go @@ -68,22 +68,22 @@ outer: return "" } -func mustGetDocKeyFromDocMap(docMap map[string]any) string { +func mustGetDocIDFromDocMap(docMap map[string]any) string { doc, err := client.NewDocFromMap(docMap) if err != nil { panic("can not get doc from map" + err.Error()) } - return doc.Key().String() + return doc.ID().String() } -func mustAddKeyToDoc(doc map[string]any) map[string]any { - doc[request.KeyFieldName] = mustGetDocKeyFromDocMap(doc) +func mustAddDocIDToDoc(doc map[string]any) map[string]any { + doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc) return doc } -func mustAddKeysToDocs(docs []map[string]any) []map[string]any { +func mustAddDocIDsToDocs(docs []map[string]any) []map[string]any { for i := range docs { - mustAddKeyToDoc(docs[i]) + mustAddDocIDToDoc(docs[i]) } return docs } diff --git a/version/version.go b/version/version.go index 67538d302b..32de6f67c8 100644 --- a/version/version.go +++ b/version/version.go @@ -37,12 +37,12 @@ var ( type defraVersion struct { Release string `json:"release"` Commit string `json:"commit"` - CommitDate string `json:"commitdate"` + CommitDate string `json:"commitDate"` GoInfo string `json:"go"` - VersionHTTPAPI string `json:"httpapi"` - DocKeyVersions string `json:"dockeyversions"` - NetProtocol string `json:"netprotocol"` + VersionHTTPAPI string `json:"httpAPI"` + DocIDVersions string `json:"docIDVersions"` + NetProtocol string `json:"netProtocol"` } // NewDefraVersion returns a defraVersion with normalized values. 
@@ -55,13 +55,13 @@ func NewDefraVersion() (defraVersion, error) { VersionHTTPAPI: http.Version, NetProtocol: string(net.Protocol), } - var docKeyVersions []string - for k, v := range client.ValidDocKeyVersions { + var docIDVersions []string + for k, v := range client.ValidDocIDVersions { if v { - docKeyVersions = append(docKeyVersions, fmt.Sprintf("%x", k)) + docIDVersions = append(docIDVersions, fmt.Sprintf("%x", k)) } } - dv.DocKeyVersions = strings.Join(docKeyVersions, ",") + dv.DocIDVersions = strings.Join(docIDVersions, ",") return dv, nil } @@ -88,14 +88,14 @@ func (dv *defraVersion) StringFull() string { `defradb %s (%s %s) * HTTP API: %s * P2P multicodec: %s -* DocKey versions: %s +* DocID versions: %s * Go: %s`, dv.Release, commitHash, dv.CommitDate, dv.VersionHTTPAPI, dv.NetProtocol, - dv.DocKeyVersions, + dv.DocIDVersions, dv.GoInfo, ) } diff --git a/version/version_test.go b/version/version_test.go index f69c9959e4..1f46fc208d 100644 --- a/version/version_test.go +++ b/version/version_test.go @@ -23,7 +23,7 @@ func TestNewDefraVersion(t *testing.T) { assert.NotEmpty(t, dv.VersionHTTPAPI) assert.NotEmpty(t, dv.NetProtocol) - assert.NotEmpty(t, dv.DocKeyVersions) + assert.NotEmpty(t, dv.DocIDVersions) // These variables are set in the Makefile via BUILD_FLAGS when building defradb. // This test assumes the test suite is not using these BUILD_FLAGS. @@ -51,14 +51,14 @@ func TestDefraVersionStringFull(t *testing.T) { CommitDate: "2022-01-01T12:00:00Z", GoInfo: "1.17.5", VersionHTTPAPI: "v0", - DocKeyVersions: "1", + DocIDVersions: "1", NetProtocol: "/defra/0.0.1", } expected := `defradb test-release (abc123de 2022-01-01T12:00:00Z) * HTTP API: v0 * P2P multicodec: /defra/0.0.1 -* DocKey versions: 1 +* DocID versions: 1 * Go: 1.17.5` assert.Equal(t, expected, dv.StringFull()) @@ -71,7 +71,7 @@ func TestDefraVersion_JSON(t *testing.T) { CommitDate: "2022-01-01T12:00:00Z", GoInfo: "go1.17.5", VersionHTTPAPI: "1.2.3", - DocKeyVersions: "0123456789abcdef", + DocIDVersions: "0123456789abcdef", NetProtocol: "test-protocol", } From 0c1c4fe7fc135ff2a8c760a07ea40c7f1173c6ad Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Fri, 5 Jan 2024 10:12:04 -0500 Subject: [PATCH 37/60] refactor: Add strong typing to document creation (#2161) ## Relevant issue(s) Resolves #935 Resolves #1703 ## Description This PR adds strong document typing to document creation by using the field descriptions to determine Go types. Datetime is now properly supported and formatting is enforced. Note that a lot of docIDs have changed as a result of this refactor. 
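For illustration, here is a minimal sketch of the new calling convention, distilled from the `cli/collection_create.go` and `client/document.go` hunks below. The sketch is not part of the patch: the `createFromJSON` helper name is invented for the example, and the `client.Collection` handle type is an assumption; only `IsJSONArray`, `NewDocFromJSON`, `NewDocsFromJSON`, `Schema`, `Create`, and `CreateMany` are taken from this diff.

```go
import (
	"context"

	"github.com/sourcenetwork/defradb/client"
)

// createFromJSON mirrors the updated CLI flow; the helper name is invented
// for this sketch and col is assumed to be an open collection handle.
func createFromJSON(ctx context.Context, col client.Collection, docData []byte) error {
	if client.IsJSONArray(docData) {
		// A JSON array is parsed into a batch of documents, each one
		// typed against the collection's schema description.
		docs, err := client.NewDocsFromJSON(docData, col.Schema())
		if err != nil {
			return err
		}
		return col.CreateMany(ctx, docs)
	}

	// A single JSON object is validated field-by-field against the
	// schema, so a mistyped value (e.g. a malformed datetime) errors
	// here, before anything is written.
	doc, err := client.NewDocFromJSON(docData, col.Schema())
	if err != nil {
		return err
	}
	return col.Create(ctx, doc)
}
```

Passing the `SchemaDescription` into the constructors is what lets parsing fail early on mistyped fields, including malformed datetimes, instead of surfacing at write time.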
--- cli/collection_create.go | 33 +- client/document.go | 542 +++++++++++------- client/document_test.go | 70 +-- client/errors.go | 22 +- client/value.go | 34 +- core/encoding.go | 6 + db/backup.go | 6 +- db/backup_test.go | 53 +- db/collection.go | 5 - db/collection_get.go | 2 +- db/collection_index.go | 2 +- db/collection_update.go | 191 +----- db/errors.go | 5 - db/fetcher/encoded_doc.go | 8 +- db/indexed_docs_test.go | 242 ++++---- .../i2161-document-strong-typing.md | 3 + http/client_collection.go | 21 +- http/handler_ccip_test.go | 2 +- http/handler_collection.go | 4 +- net/client_test.go | 28 +- net/dag_test.go | 12 +- net/peer_test.go | 18 +- net/server_test.go | 6 +- planner/create.go | 2 +- request/graphql/schema/descriptions.go | 2 +- tests/bench/bench_util.go | 2 +- tests/bench/collection/utils.go | 6 +- tests/clients/cli/wrapper_collection.go | 20 +- tests/gen/gen_auto.go | 9 +- tests/gen/gen_auto_test.go | 9 +- .../collection/update/simple/utils.go | 22 +- .../update/simple/with_doc_id_test.go | 2 +- .../update/simple/with_doc_ids_test.go | 4 +- .../update/simple/with_filter_test.go | 2 +- tests/integration/collection/utils.go | 2 +- tests/integration/events/simple/utils.go | 18 +- .../events/simple/with_create_test.go | 2 + .../events/simple/with_delete_test.go | 1 + .../events/simple/with_update_test.go | 2 + tests/integration/events/utils.go | 2 +- .../one_to_one_to_one/with_txn_test.go | 24 +- .../one_to_many/with_show_deleted_test.go | 33 +- .../one_to_one_to_one/with_txn_test.go | 34 +- .../update/field_kinds/date_time_test.go | 20 +- .../one_to_many/with_alias_test.go | 45 +- .../field_kinds/one_to_one/with_alias_test.go | 20 - .../mutation/update/with_filter_test.go | 4 +- .../mutation/update/with_ids_test.go | 4 +- tests/integration/net/order/tcp_test.go | 5 +- tests/integration/net/order/utils.go | 2 +- .../query/one_to_many/with_cid_doc_id_test.go | 21 +- .../query/one_to_many/with_id_field_test.go | 8 +- .../query/one_to_many_to_many/joins_test.go | 8 +- .../query/one_to_many_to_one/fixture.go | 4 +- .../query/one_to_many_to_one/joins_test.go | 8 +- .../query/one_to_many_to_one/simple_test.go | 18 +- .../one_to_many_to_one/with_filter_test.go | 8 +- .../query/one_to_many_to_one/with_sum_test.go | 10 +- .../query/one_to_one/simple_test.go | 4 +- .../one_to_one/with_count_filter_test.go | 8 +- .../query/one_to_one/with_filter_test.go | 12 +- .../query/one_to_two_many/simple_test.go | 8 +- .../query/simple/with_average_filter_test.go | 8 +- .../query/simple/with_count_filter_test.go | 8 +- .../with_filter/with_eq_datetime_test.go | 12 +- .../with_filter/with_ge_datetime_test.go | 20 +- .../with_filter/with_gt_datetime_test.go | 20 +- .../with_filter/with_le_datetime_test.go | 14 +- .../with_filter/with_lt_datetime_test.go | 8 +- .../with_filter/with_ne_datetime_test.go | 10 +- .../simple/with_group_average_filter_test.go | 18 +- .../query/simple/with_group_test.go | 14 +- .../query/simple/with_order_test.go | 16 +- tests/integration/results.go | 3 + .../updates/add/field/kind/datetime_test.go | 10 +- .../subscription/subscription_test.go | 2 +- tests/integration/utils2.go | 66 ++- tests/predefined/gen_predefined.go | 4 +- tests/predefined/gen_predefined_test.go | 485 +++++++++------- tests/predefined/util_test.go | 12 +- 80 files changed, 1261 insertions(+), 1199 deletions(-) create mode 100644 docs/data_format_changes/i2161-document-strong-typing.md diff --git a/cli/collection_create.go b/cli/collection_create.go index 4dca9be33a..82e1e5db09 100644 --- 
a/cli/collection_create.go
+++ b/cli/collection_create.go
@@ -11,7 +11,6 @@
 package cli
 
 import (
-	"encoding/json"
 	"io"
 	"os"
 
@@ -66,35 +65,19 @@ Example: create from stdin
 			return ErrNoDocOrFile
 		}
 
-		var docMap any
-		if err := json.Unmarshal(docData, &docMap); err != nil {
-			return err
-		}
-
-		switch t := docMap.(type) {
-		case map[string]any:
-			doc, err := client.NewDocFromMap(t)
+		if client.IsJSONArray(docData) {
+			docs, err := client.NewDocsFromJSON(docData, col.Schema())
 			if err != nil {
 				return err
 			}
-			return col.Create(cmd.Context(), doc)
-		case []any:
-			docs := make([]*client.Document, len(t))
-			for i, v := range t {
-				docMap, ok := v.(map[string]any)
-				if !ok {
-					return ErrInvalidDocument
-				}
-				doc, err := client.NewDocFromMap(docMap)
-				if err != nil {
-					return err
-				}
-				docs[i] = doc
-			}
 			return col.CreateMany(cmd.Context(), docs)
-		default:
-			return ErrInvalidDocument
 		}
+
+		doc, err := client.NewDocFromJSON(docData, col.Schema())
+		if err != nil {
+			return err
+		}
+		return col.Create(cmd.Context(), doc)
 	},
 }
 cmd.Flags().StringVarP(&file, "file", "f", "", "File containing document(s)")
diff --git a/client/document.go b/client/document.go
index 6713f48dd0..113ddd1b1b 100644
--- a/client/document.go
+++ b/client/document.go
@@ -1,4 +1,4 @@
-// Copyright 2022 Democratized Data Foundation
+// Copyright 2023 Democratized Data Foundation
 //
 // Use of this software is governed by the Business Source License
 // included in the file licenses/BSL.txt.
@@ -12,11 +12,15 @@ package client
 
 import (
 	"encoding/json"
+	"regexp"
 	"strings"
 	"sync"
+	"time"
 
 	"github.com/fxamacker/cbor/v2"
 	"github.com/ipfs/go-cid"
+	"github.com/sourcenetwork/immutable"
+	"github.com/valyala/fastjson"
 
 	"github.com/sourcenetwork/defradb/client/request"
 	ccid "github.com/sourcenetwork/defradb/core/cid"
@@ -53,42 +57,36 @@ import (
 // @body: A document interface can be implemented by both a TypedDocument and a
 // UnTypedDocument, which use a schema and schemaless approach respectively.
 type Document struct {
-	id DocID
-	// SchemaVersionID holds the id of the schema version that this document is
-	// currently at.
-	//
-	// Migrating the document will update this value to the output version of the
-	// migration.
-	SchemaVersionID string
-	fields map[string]Field
-	values map[Field]Value
-	head cid.Cid
-	mu sync.RWMutex
+	id     DocID
+	fields map[string]Field
+	values map[Field]Value
+	head   cid.Cid
+	mu     sync.RWMutex
 	// marks if document has unsaved changes
 	isDirty bool
-}
 
-// NewDocWithID creates a new Document with a specified DocID.
-func NewDocWithID(docID DocID) *Document {
-	doc := newEmptyDoc()
-	doc.id = docID
-	return doc
+	schemaDescription SchemaDescription
 }
 
-func newEmptyDoc() *Document {
+func newEmptyDoc(sd SchemaDescription) *Document {
 	return &Document{
-		fields: make(map[string]Field),
-		values: make(map[Field]Value),
+		fields:            make(map[string]Field),
+		values:            make(map[Field]Value),
+		schemaDescription: sd,
 	}
 }
 
+// NewDocWithID creates a new Document with a specified DocID.
+func NewDocWithID(docID DocID, sd SchemaDescription) *Document {
+	doc := newEmptyDoc(sd)
+	doc.id = docID
+	return doc
+}
+
 // NewDocFromMap creates a new Document from a data map.
-func NewDocFromMap(data map[string]any) (*Document, error) {
+func NewDocFromMap(data map[string]any, sd SchemaDescription) (*Document, error) {
 	var err error
-	doc := &Document{
-		fields: make(map[string]Field),
-		values: make(map[Field]Value),
-	}
+	doc := newEmptyDoc(sd)
 
 	// check if document contains special _docID field
 	k, hasDocID := data[request.DocIDFieldName]
@@ -119,15 +117,275 @@ func NewDocFromMap(data map[string]any) (*Document, error) {
 	return doc, nil
 }
 
+var jsonArrayPattern = regexp.MustCompile(`^\s*\[.*\]\s*$`)
+
+// IsJSONArray returns true if the given byte array is a JSON Array.
+func IsJSONArray(obj []byte) bool {
+	return jsonArrayPattern.Match(obj)
+}
+
 // NewFromJSON creates a new instance of a Document from a raw JSON object byte array.
-func NewDocFromJSON(obj []byte) (*Document, error) {
-	data := make(map[string]any)
-	err := json.Unmarshal(obj, &data)
+func NewDocFromJSON(obj []byte, sd SchemaDescription) (*Document, error) {
+	doc := newEmptyDoc(sd)
+	err := doc.SetWithJSON(obj)
+	if err != nil {
+		return nil, err
+	}
+	err = doc.generateAndSetDocID()
+	if err != nil {
+		return nil, err
+	}
+	return doc, nil
+}
+
+// NewDocsFromJSON creates a new slice of Documents from a raw JSON array byte array.
+// It will return an error if the given byte array is not a valid JSON array.
+func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) {
+	v, err := fastjson.ParseBytes(obj)
 	if err != nil {
 		return nil, err
 	}
+	a, err := v.Array()
+	if err != nil {
+		return nil, err
+	}
+
+	docs := make([]*Document, 0, len(a))
+	for _, v := range a {
+		o, err := v.Object()
+		if err != nil {
+			return nil, err
+		}
+		doc := newEmptyDoc(sd)
+		err = doc.setWithFastJSONObject(o)
+		if err != nil {
+			return nil, err
+		}
+		err = doc.generateAndSetDocID()
+		if err != nil {
+			return nil, err
+		}
+		docs = append(docs, doc)
+	}
+
+	return docs, nil
+}
+
+// validateFieldSchema takes a given value as an interface,
+// and ensures it matches the supplied field description.
+// It will do any minor parsing, like dates, and return
+// the typed value again as an interface.
+func validateFieldSchema(val any, field FieldDescription) (any, error) { + switch field.Kind { + case FieldKind_DocID, FieldKind_STRING, FieldKind_BLOB: + return getString(val) + + case FieldKind_STRING_ARRAY: + return getArray(val, getString) + + case FieldKind_NILLABLE_STRING_ARRAY: + return getNillableArray(val, getString) + + case FieldKind_BOOL: + return getBool(val) + + case FieldKind_BOOL_ARRAY: + return getArray(val, getBool) + + case FieldKind_NILLABLE_BOOL_ARRAY: + return getNillableArray(val, getBool) + + case FieldKind_FLOAT: + return getFloat64(val) + + case FieldKind_FLOAT_ARRAY: + return getArray(val, getFloat64) + + case FieldKind_NILLABLE_FLOAT_ARRAY: + return getNillableArray(val, getFloat64) + + case FieldKind_DATETIME: + return getDateTime(val) + + case FieldKind_INT: + return getInt64(val) + + case FieldKind_INT_ARRAY: + return getArray(val, getInt64) + + case FieldKind_NILLABLE_INT_ARRAY: + return getNillableArray(val, getInt64) + + case FieldKind_FOREIGN_OBJECT: + return getString(val) + + case FieldKind_FOREIGN_OBJECT_ARRAY: + return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) + } + + return nil, NewErrUnhandledType("FieldKind", field.Kind) +} + +func getString(v any) (string, error) { + switch val := v.(type) { + case *fastjson.Value: + b, err := val.StringBytes() + return string(b), err + default: + return val.(string), nil + } +} + +func getBool(v any) (bool, error) { + switch val := v.(type) { + case *fastjson.Value: + return val.Bool() + default: + return val.(bool), nil + } +} + +func getFloat64(v any) (float64, error) { + switch val := v.(type) { + case *fastjson.Value: + return val.Float64() + case int: + return float64(val), nil + case int64: + return float64(val), nil + case float64: + return val, nil + default: + return 0, NewErrUnexpectedType[float64]("field", v) + } +} + +func getInt64(v any) (int64, error) { + switch val := v.(type) { + case *fastjson.Value: + return val.Int64() + case int: + return int64(val), nil + case int64: + return val, nil + case float64: + return int64(val), nil + default: + return 0, NewErrUnexpectedType[int64]("field", v) + } +} + +func getDateTime(v any) (time.Time, error) { + var s string + switch val := v.(type) { + case *fastjson.Value: + b, err := val.StringBytes() + if err != nil { + return time.Time{}, err + } + s = string(b) + case time.Time: + return val, nil + default: + s = val.(string) + } + return time.Parse(time.RFC3339, s) +} + +func getArray[T any]( + v any, + typeGetter func(any) (T, error), +) ([]T, error) { + switch val := v.(type) { + case *fastjson.Value: + if val.Type() == fastjson.TypeNull { + return nil, nil + } + + valArray, err := val.Array() + if err != nil { + return nil, err + } + + arr := make([]T, len(valArray)) + for i, arrItem := range valArray { + if arrItem.Type() == fastjson.TypeNull { + continue + } + arr[i], err = typeGetter(arrItem) + if err != nil { + return nil, err + } + } + + return arr, nil + case []any: + arr := make([]T, len(val)) + for i, arrItem := range val { + var err error + arr[i], err = typeGetter(arrItem) + if err != nil { + return nil, err + } + } + + return arr, nil + case []T: + return val, nil + default: + return []T{}, nil + } +} + +func getNillableArray[T any]( + v any, + typeGetter func(any) (T, error), +) ([]immutable.Option[T], error) { + switch val := v.(type) { + case *fastjson.Value: + if val.Type() == fastjson.TypeNull { + return nil, nil + } + + valArray, err := val.Array() + if err != nil { + return nil, err + } + + arr := make([]immutable.Option[T], 
len(valArray)) + for i, arrItem := range valArray { + if arrItem.Type() == fastjson.TypeNull { + arr[i] = immutable.None[T]() + continue + } + v, err := typeGetter(arrItem) + if err != nil { + return nil, err + } + arr[i] = immutable.Some(v) + } - return NewDocFromMap(data) + return arr, nil + case []any: + arr := make([]immutable.Option[T], len(val)) + for i, arrItem := range val { + if arrItem == nil { + arr[i] = immutable.None[T]() + continue + } + v, err := typeGetter(arrItem) + if err != nil { + return nil, err + } + arr[i] = immutable.Some(v) + } + + return arr, nil + case []immutable.Option[T]: + return val, nil + default: + return []immutable.Option[T]{}, nil + } } // Head returns the current head CID of the document. @@ -201,30 +459,52 @@ func (doc *Document) GetValueWithField(f Field) (Value, error) { // JSON Merge Patch object. Note: fields indicated as nil in the Merge // Patch are to be deleted // @todo: Handle sub documents for SetWithJSON -func (doc *Document) SetWithJSON(patch []byte) error { - var patchObj map[string]any - err := json.Unmarshal(patch, &patchObj) +func (doc *Document) SetWithJSON(obj []byte) error { + v, err := fastjson.ParseBytes(obj) + if err != nil { + return err + } + o, err := v.Object() if err != nil { return err } - for k, v := range patchObj { - err = doc.Set(k, v) + return doc.setWithFastJSONObject(o) +} + +func (doc *Document) setWithFastJSONObject(obj *fastjson.Object) error { + var visitErr error + obj.Visit(func(k []byte, v *fastjson.Value) { + fieldName := string(k) + err := doc.Set(fieldName, v) if err != nil { - return err + visitErr = err + return } - } - return nil + }) + return visitErr } // Set the value of a field. func (doc *Document) Set(field string, value any) error { - return doc.setAndParseType(field, value) -} - -// SetAs is the same as set, but you can manually set the CRDT type. -func (doc *Document) SetAs(field string, value any, t CType) error { - return doc.setCBOR(t, field, value) + fd, exists := doc.schemaDescription.GetField(field) + if !exists { + return NewErrFieldNotExist(field) + } + if fd.IsRelation() && !fd.IsObjectArray() { + if !strings.HasSuffix(field, request.RelatedObjectID) { + field = field + request.RelatedObjectID + } + fd, exists = doc.schemaDescription.GetField(field) + if !exists { + return NewErrFieldNotExist(field) + } + } + val, err := validateFieldSchema(value, fd) + if err != nil { + return err + } + return doc.setCBOR(fd.Typ, field, val) } // Delete removes a field, and marks it to be deleted on the following db.Update() call. 
@@ -261,83 +541,12 @@ func (doc *Document) setCBOR(t CType, field string, val any) error { return doc.set(t, field, value) } -func (doc *Document) setObject(t CType, field string, val *Document) error { - value := newValue(t, val) - return doc.set(t, field, &value) -} - -// @todo: Update with document schemas -func (doc *Document) setAndParseType(field string, value any) error { - if value == nil { - return doc.setCBOR(LWW_REGISTER, field, value) - } - - switch val := value.(type) { - // int (any number) - case int: - err := doc.setCBOR(LWW_REGISTER, field, int64(val)) - if err != nil { - return err - } - case uint64: - err := doc.setCBOR(LWW_REGISTER, field, int64(val)) - if err != nil { - return err - } - - case float64: - // case int64: - - // Check if its actually a float or just an int - if float64(int64(val)) == val { //int - err := doc.setCBOR(LWW_REGISTER, field, int64(val)) - if err != nil { - return err - } - } else { //float - err := doc.setCBOR(LWW_REGISTER, field, val) - if err != nil { - return err - } - } - - // string, bool, and more - case string, bool, int64, []any, []bool, []*bool, []int64, []*int64, []float64, []*float64, []string, []*string: - err := doc.setCBOR(LWW_REGISTER, field, val) - if err != nil { - return err - } - - // sub object, recurse down. - // @TODO: Object Definitions - // You can use an object as a way to override defaults - // and types for JSON literals. - // Eg. - // Instead of { "Timestamp": 123 } - // - which is parsed as an int - // Use { "Timestamp" : { "_Type": "uint64", "_Value": 123 } } - // - Which is parsed as an uint64 - case map[string]any: - subDoc := newEmptyDoc() - err := subDoc.setAndParseObjectType(val) - if err != nil { - return err - } - - err = doc.setObject(OBJECT, field, subDoc) - if err != nil { - return err - } - - default: - return NewErrUnhandledType(field, val) - } - return nil -} - func (doc *Document) setAndParseObjectType(value map[string]any) error { for k, v := range value { - err := doc.setAndParseType(k, v) + if v == nil { + continue + } + err := doc.Set(k, v) if err != nil { return err } @@ -423,7 +632,7 @@ func (doc *Document) Clean() { val, _ := doc.GetValueWithField(v) if val.IsDirty() { if val.IsDelete() { - doc.SetAs(v.Name(), nil, v.Type()) //nolint:errcheck + doc.Set(v.Name(), nil) //nolint:errcheck } val.Clean() } @@ -517,40 +726,6 @@ func (doc *Document) generateAndSetDocID() error { return nil } -func (doc *Document) remapAliasFields(fieldDescriptions []FieldDescription) (bool, error) { - doc.mu.Lock() - defer doc.mu.Unlock() - - foundAlias := false - for docField, docFieldValue := range doc.fields { - for _, fieldDescription := range fieldDescriptions { - maybeAliasField := docField + request.RelatedObjectID - if fieldDescription.Name == maybeAliasField { - foundAlias = true - doc.fields[maybeAliasField] = docFieldValue - delete(doc.fields, docField) - } - } - } - - return foundAlias, nil -} - -// RemapAliasFieldsAndDocID remaps the alias fields and fixes (overwrites) the DocID. -func (doc *Document) RemapAliasFieldsAndDocID(fieldDescriptions []FieldDescription) error { - foundAlias, err := doc.remapAliasFields(fieldDescriptions) - if err != nil { - return err - } - - if !foundAlias { - return nil - } - - // Update the DocID so DocID isn't based on an aliased name of a field. - return doc.generateAndSetDocID() -} - // DocumentStatus represent the state of the document in the DAG store. // It can either be `Active“ or `Deleted`. 
type DocumentStatus uint8 @@ -577,65 +752,6 @@ func (dStatus DocumentStatus) IsDeleted() bool { return dStatus > 1 } -// loops through an object of the form map[string]any -// and fills in the Document with each field it finds in the object. -// Automatically handles sub objects and arrays. -// Does not allow anonymous fields, error is thrown in this case -// Eg. The JSON value [1,2,3,4] by itself is a valid JSON Object, but has no -// field name. -// func parseJSONObject(doc *Document, data map[string]any) error { -// for k, v := range data { -// switch v.(type) { - -// // int (any number) -// case float64: -// // case int64: - -// // Check if its actually a float or just an int -// val := v.(float64) -// if float64(int64(val)) == val { //int -// doc.setCBOR(crdt.LWW_REGISTER, k, int64(val)) -// } else { //float -// panic("todo") -// } -// break - -// // string -// case string: -// doc.setCBOR(crdt.LWW_REGISTER, k, v) -// break - -// // array -// case []any: -// break - -// // sub object, recurse down. -// // @TODO: Object Definitions -// // You can use an object as a way to override defaults -// // and types for JSON literals. -// // Eg. -// // Instead of { "Timestamp": 123 } -// // - which is parsed as an int -// // Use { "Timestamp" : { "_Type": "uint64", "_Value": 123 } } -// // - Which is parsed as an uint64 -// case map[string]any: -// subDoc := newEmptyDoc() -// err := parseJSONObject(subDoc, v.(map[string]any)) -// if err != nil { -// return err -// } - -// doc.setObject(crdt.OBJECT, k, subDoc) -// break - -// default: -// return errors.Wrap("Unhandled type in raw JSON: %v => %T", k, v) - -// } -// } -// return nil -// } - // parses a document field path, can have sub elements if we have embedded objects. // Returns the first path, the remaining split paths, and a bool indicating if there are sub paths func parseFieldPath(path string) (string, string, bool) { diff --git a/client/document_test.go b/client/document_test.go index ee15dc5673..dc5867b562 100644 --- a/client/document_test.go +++ b/client/document_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. 
@@ -14,6 +14,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ccid "github.com/sourcenetwork/defradb/core/cid" ) @@ -21,18 +22,32 @@ import ( var ( testJSONObj = []byte(`{ "Name": "John", - "Age": 26, - "Address": { - "Street": "Main", - "City": "Toronto" - } + "Age": 26 }`) pref = ccid.NewDefaultSHA256PrefixV1() + + schemaDescriptions = []SchemaDescription{ + { + Name: "User", + Fields: []FieldDescription{ + { + Name: "Name", + Typ: LWW_REGISTER, + Kind: FieldKind_STRING, + }, + { + Name: "Age", + Typ: LWW_REGISTER, + Kind: FieldKind_INT, + }, + }, + }, + } ) func TestNewFromJSON(t *testing.T) { - doc, err := NewDocFromJSON(testJSONObj) + doc, err := NewDocFromJSON(testJSONObj, schemaDescriptions[0]) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -61,31 +76,16 @@ func TestNewFromJSON(t *testing.T) { assert.Equal(t, doc.fields["Name"].Type(), LWW_REGISTER) assert.Equal(t, doc.fields["Age"].Name(), "Age") assert.Equal(t, doc.fields["Age"].Type(), LWW_REGISTER) - assert.Equal(t, doc.fields["Address"].Name(), "Address") - assert.Equal(t, doc.fields["Address"].Type(), OBJECT) //values assert.Equal(t, doc.values[doc.fields["Name"]].Value(), "John") assert.Equal(t, doc.values[doc.fields["Name"]].IsDocument(), false) assert.Equal(t, doc.values[doc.fields["Age"]].Value(), int64(26)) assert.Equal(t, doc.values[doc.fields["Age"]].IsDocument(), false) - assert.Equal(t, doc.values[doc.fields["Address"]].IsDocument(), true) - - //subdoc fields - subDoc := doc.values[doc.fields["Address"]].Value().(*Document) - assert.Equal(t, subDoc.fields["Street"].Name(), "Street") - assert.Equal(t, subDoc.fields["Street"].Type(), LWW_REGISTER) - assert.Equal(t, subDoc.fields["City"].Name(), "City") - assert.Equal(t, subDoc.fields["City"].Type(), LWW_REGISTER) - - //subdoc values - assert.Equal(t, subDoc.values[subDoc.fields["Street"]].Value(), "Main") - assert.Equal(t, subDoc.values[subDoc.fields["Street"]].IsDocument(), false) - assert.Equal(t, subDoc.values[subDoc.fields["City"]].Value(), "Toronto") } func TestSetWithJSON(t *testing.T) { - doc, err := NewDocFromJSON(testJSONObj) + doc, err := NewDocFromJSON(testJSONObj, schemaDescriptions[0]) if err != nil { t.Error("Error creating new doc from JSON:", err) return @@ -110,8 +110,7 @@ func TestSetWithJSON(t *testing.T) { updatePatch := []byte(`{ "Name": "Alice", - "Age": 27, - "Address": null + "Age": 27 }`) err = doc.SetWithJSON(updatePatch) if err != nil { @@ -124,26 +123,15 @@ func TestSetWithJSON(t *testing.T) { assert.Equal(t, doc.fields["Name"].Type(), LWW_REGISTER) assert.Equal(t, doc.fields["Age"].Name(), "Age") assert.Equal(t, doc.fields["Age"].Type(), LWW_REGISTER) - assert.Equal(t, doc.fields["Address"].Name(), "Address") - assert.Equal(t, doc.fields["Address"].Type(), OBJECT) //values assert.Equal(t, doc.values[doc.fields["Name"]].Value(), "Alice") assert.Equal(t, doc.values[doc.fields["Name"]].IsDocument(), false) assert.Equal(t, doc.values[doc.fields["Age"]].Value(), int64(27)) assert.Equal(t, doc.values[doc.fields["Age"]].IsDocument(), false) - assert.Equal(t, doc.values[doc.fields["Address"]].Value(), nil) - assert.Equal(t, doc.values[doc.fields["Address"]].IsDocument(), false) - - //subdoc fields - // subDoc := doc.values[doc.fields["Address"]].Value().(*Document) - // assert.Equal(t, subDoc.fields["Street"].Name(), "Street") - // assert.Equal(t, subDoc.fields["Street"].Type(), client.LWW_REGISTER) - // assert.Equal(t, subDoc.fields["City"].Name(), "City") - // 
assert.Equal(t, subDoc.fields["City"].Type(), client.LWW_REGISTER) - - // //subdoc values - // assert.Equal(t, subDoc.values[subDoc.fields["Street"]].Value(), "Main") - // assert.Equal(t, subDoc.values[subDoc.fields["Street"]].IsDocument(), false) - // assert.Equal(t, subDoc.values[subDoc.fields["City"]].Value(), "Toronto") +} + +func TestNewDocsFromJSON_WithObjectInsteadOfArray_Error(t *testing.T) { + _, err := NewDocsFromJSON(testJSONObj, schemaDescriptions[0]) + require.ErrorContains(t, err, "value doesn't contain array; it contains object") } diff --git a/client/errors.go b/client/errors.go index 3d1de52a3d..a15e98f8f3 100644 --- a/client/errors.go +++ b/client/errors.go @@ -17,14 +17,15 @@ import ( ) const ( - errFieldNotExist string = "The given field does not exist" - errUnexpectedType string = "unexpected type" - errParsingFailed string = "failed to parse argument" - errUninitializeProperty string = "invalid state, required property is uninitialized" - errMaxTxnRetries string = "reached maximum transaction reties" - errRelationOneSided string = "relation must be defined on both schemas" - errCollectionNotFound string = "collection not found" - errUnknownCRDT string = "unknown crdt" + errFieldNotExist string = "The given field does not exist" + errUnexpectedType string = "unexpected type" + errParsingFailed string = "failed to parse argument" + errUninitializeProperty string = "invalid state, required property is uninitialized" + errMaxTxnRetries string = "reached maximum transaction reties" + errRelationOneSided string = "relation must be defined on both schemas" + errCollectionNotFound string = "collection not found" + errUnknownCRDT string = "unknown crdt" + errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" ) // Errors returnable from this package. @@ -125,3 +126,8 @@ func NewErrUnknownCRDT(cType CType) error { errors.NewKV("Type", cType), ) } + +// NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. +func NewErrFieldOrAliasToFieldNotExist(name string) error { + return errors.New(errFieldOrAliasToFieldNotExist, errors.NewKV("Name", name)) +} diff --git a/client/value.go b/client/value.go index c07f265957..3586e9b03f 100644 --- a/client/value.go +++ b/client/value.go @@ -12,6 +12,7 @@ package client import ( "github.com/fxamacker/cbor/v2" + "github.com/sourcenetwork/immutable" ) // Value is an interface that points to a concrete Value implementation. 
@@ -107,5 +108,36 @@ func newCBORValue(t CType, val any) WriteableValue { } func (v cborValue) Bytes() ([]byte, error) { - return cbor.Marshal(v.value) + em, err := cbor.EncOptions{Time: cbor.TimeRFC3339}.EncMode() + if err != nil { + return nil, err + } + + var val any + switch tempVal := v.value.(type) { + case []immutable.Option[string]: + val = convertImmutable(tempVal) + case []immutable.Option[int64]: + val = convertImmutable(tempVal) + case []immutable.Option[float64]: + val = convertImmutable(tempVal) + case []immutable.Option[bool]: + val = convertImmutable(tempVal) + default: + val = v.value + } + + return em.Marshal(val) +} + +func convertImmutable[T any](vals []immutable.Option[T]) []any { + var out []any + for _, val := range vals { + if !val.HasValue() { + out = append(out, nil) + continue + } + out = append(out, val.Value()) + } + return out } diff --git a/core/encoding.go b/core/encoding.go index ba4926ffc5..f6b46a4381 100644 --- a/core/encoding.go +++ b/core/encoding.go @@ -12,6 +12,7 @@ package core import ( "fmt" + "time" "github.com/sourcenetwork/immutable" @@ -119,6 +120,11 @@ func DecodeFieldValue(fieldDesc client.FieldDescription, val any) (any, error) { case uint: return int64(v), nil } + case client.FieldKind_DATETIME: + switch v := val.(type) { + case string: + return time.Parse(time.RFC3339, v) + } } } diff --git a/db/backup.go b/db/backup.go index cc8cd01fff..d3a1138686 100644 --- a/db/backup.go +++ b/db/backup.go @@ -85,7 +85,7 @@ func (db *db) basicImport(ctx context.Context, txn datastore.Txn, filepath strin delete(docMap, request.DocIDFieldName) delete(docMap, request.NewDocIDFieldName) - doc, err := client.NewDocFromMap(docMap) + doc, err := client.NewDocFromMap(docMap, col.Schema()) if err != nil { return NewErrDocFromMap(err) } @@ -260,7 +260,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client refFieldName = field.Name + request.RelatedObjectID } - newForeignDoc, err := client.NewDocFromMap(oldForeignDoc) + newForeignDoc, err := client.NewDocFromMap(oldForeignDoc, foreignCol.Schema()) if err != nil { return err } @@ -291,7 +291,7 @@ func (db *db) basicExport(ctx context.Context, txn datastore.Txn, config *client delete(docM, refFieldName) } - newDoc, err := client.NewDocFromMap(docM) + newDoc, err := client.NewDocFromMap(docM, col.Schema()) if err != nil { return err } diff --git a/db/backup_test.go b/db/backup_test.go index cbe1aed58d..093b1a1a3f 100644 --- a/db/backup_test.go +++ b/db/backup_test.go @@ -37,14 +37,13 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { city: String }`) require.NoError(t, err) - - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`)) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) require.NoError(t, err) - col1, err := db.GetCollectionByName(ctx, "User") + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -53,10 +52,10 @@ func TestBasicExport_WithNormalFormatting_NoError(t *testing.T) { err = col1.Create(ctx, doc2) require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`)) + col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - col2, err := db.GetCollectionByName(ctx, "Address") + doc3, err 
:= client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -100,13 +99,13 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { }`) require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`)) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) require.NoError(t, err) - col1, err := db.GetCollectionByName(ctx, "User") + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -115,10 +114,10 @@ func TestBasicExport_WithPrettyFormatting_NoError(t *testing.T) { err = col1.Create(ctx, doc2) require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`)) + col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - col2, err := db.GetCollectionByName(ctx, "Address") + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -162,13 +161,13 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { }`) require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`)) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) require.NoError(t, err) - col1, err := db.GetCollectionByName(ctx, "User") + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -177,10 +176,10 @@ func TestBasicExport_WithSingleCollection_NoError(t *testing.T) { err = col1.Create(ctx, doc2) require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`)) + col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - col2, err := db.GetCollectionByName(ctx, "Address") + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -225,13 +224,13 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { }`) require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`)) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) require.NoError(t, err) - col1, err := db.GetCollectionByName(ctx, "User") + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 31}`), col1.Schema()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -240,13 +239,13 @@ func TestBasicExport_WithMultipleCollectionsAndUpdate_NoError(t *testing.T) { err = col1.Create(ctx, doc2) require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`)) + col2, err := db.GetCollectionByName(ctx, "Book") require.NoError(t, err) - doc4, 
err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`)) + doc3, err := client.NewDocFromJSON([]byte(`{"name": "John and the sourcerers' stone", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) require.NoError(t, err) - col2, err := db.GetCollectionByName(ctx, "Book") + doc4, err := client.NewDocFromJSON([]byte(`{"name": "Game of chains", "author": "bae-e933420a-988a-56f8-8952-6c245aebd519"}`), col2.Schema()) require.NoError(t, err) err = col2.Create(ctx, doc3) @@ -298,13 +297,13 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { }`) require.NoError(t, err) - doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col1, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`)) + doc1, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col1.Schema()) require.NoError(t, err) - col1, err := db.GetCollectionByName(ctx, "User") + doc2, err := client.NewDocFromJSON([]byte(`{"name": "Bob", "age": 40}`), col1.Schema()) require.NoError(t, err) err = col1.Create(ctx, doc1) @@ -313,10 +312,10 @@ func TestBasicExport_EnsureFileOverwrite_NoError(t *testing.T) { err = col1.Create(ctx, doc2) require.NoError(t, err) - doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`)) + col2, err := db.GetCollectionByName(ctx, "Address") require.NoError(t, err) - col2, err := db.GetCollectionByName(ctx, "Address") + doc3, err := client.NewDocFromJSON([]byte(`{"street": "101 Maple St", "city": "Toronto"}`), col2.Schema()) require.NoError(t, err) err = col2.Create(ctx, doc3) diff --git a/db/collection.go b/db/collection.go index f5b1cd3b27..abc9e767d6 100644 --- a/db/collection.go +++ b/db/collection.go @@ -791,11 +791,6 @@ func (c *collection) getDocIDAndPrimaryKeyFromDoc( } func (c *collection) create(ctx context.Context, txn datastore.Txn, doc *client.Document) error { - // This has to be done before docID verification happens in the next step. 
- if err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields); err != nil { - return err - } - docID, primaryKey, err := c.getDocIDAndPrimaryKeyFromDoc(doc) if err != nil { return err diff --git a/db/collection_get.go b/db/collection_get.go index 9ab14d4424..e19ccd58c0 100644 --- a/db/collection_get.go +++ b/db/collection_get.go @@ -85,7 +85,7 @@ func (c *collection) get( return nil, nil } - doc, err := fetcher.Decode(encodedDoc) + doc, err := fetcher.Decode(encodedDoc, c.Schema()) if err != nil { return nil, err } diff --git a/db/collection_index.go b/db/collection_index.go index c724205805..531c839280 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -258,7 +258,7 @@ func (c *collection) iterateAllDocs( break } - doc, err := fetcher.Decode(encodedDoc) + doc, err := fetcher.Decode(encodedDoc, c.Schema()) if err != nil { return err } diff --git a/db/collection_update.go b/db/collection_update.go index bdfbc0ddd5..4c1895602b 100644 --- a/db/collection_update.go +++ b/db/collection_update.go @@ -135,7 +135,7 @@ func (c *collection) updateWithDocID( if isPatch { // todo } else { - err = c.applyMergeToDoc(doc, parsedUpdater.GetObject()) + err = doc.SetWithJSON([]byte(updater)) } if err != nil { return nil, err @@ -183,7 +183,7 @@ func (c *collection) updateWithIDs( if isPatch { // todo } else { - err = c.applyMergeToDoc(doc, parsedUpdater.GetObject()) + err = doc.SetWithJSON([]byte(updater)) } if err != nil { return nil, err @@ -263,7 +263,7 @@ func (c *collection) updateWithFilter( // Get the document, and apply the patch docAsMap := docMap.ToMap(selectionPlan.Value()) - doc, err := client.NewDocFromMap(docAsMap) + doc, err := client.NewDocFromMap(docAsMap, c.Schema()) if err != nil { return nil, err } @@ -271,10 +271,10 @@ func (c *collection) updateWithFilter( if isPatch { // todo } else if isMerge { // else is fine here - err = c.applyMergeToDoc(doc, parsedUpdater.GetObject()) - } - if err != nil { - return nil, err + err := doc.SetWithJSON([]byte(updater)) + if err != nil { + return nil, err + } } _, err = c.save(ctx, txn, doc, false) @@ -290,45 +290,6 @@ func (c *collection) updateWithFilter( return results, nil } -// applyMergeToDoc applies the given json merge to the given Defra doc. -// -// It does not save the document. -func (c *collection) applyMergeToDoc( - doc *client.Document, - merge *fastjson.Object, -) error { - mergeMap := make(map[string]*fastjson.Value) - merge.Visit(func(k []byte, v *fastjson.Value) { - mergeMap[string(k)] = v - }) - - for mfield, mval := range mergeMap { - fd, isValidField := c.Schema().GetField(mfield) - if !isValidField { - return client.NewErrFieldNotExist(mfield) - } - - if fd.Kind == client.FieldKind_FOREIGN_OBJECT { - fd, isValidField = c.Schema().GetField(mfield + request.RelatedObjectID) - if !isValidField { - return client.NewErrFieldNotExist(mfield) - } - } - - cborVal, err := validateFieldSchema(mval, fd) - if err != nil { - return err - } - - err = doc.Set(fd.Name, cborVal) - if err != nil { - return err - } - } - - return nil -} - // isSecondaryIDField returns true if the given field description represents a secondary relation field ID. 
func (c *collection) isSecondaryIDField(fieldDesc client.FieldDescription) (client.FieldDescription, bool) { if fieldDesc.RelationType != client.Relation_Type_INTERNAL_ID { @@ -396,6 +357,12 @@ func (c *collection) patchPrimaryDoc( return nil } + pc := c.db.newCollection(primaryCol.Description(), primarySchema) + err = pc.validateOneToOneLinkDoesntAlreadyExist(ctx, txn, primaryDocID.String(), primaryIDField, docID) + if err != nil { + return err + } + existingVal, err := doc.GetValue(primaryIDField.Name) if err != nil && !errors.Is(err, client.ErrFieldNotExist) { return err @@ -418,138 +385,6 @@ func (c *collection) patchPrimaryDoc( return nil } -// validateFieldSchema takes a given value as an interface, -// and ensures it matches the supplied field description. -// It will do any minor parsing, like dates, and return -// the typed value again as an interface. -func validateFieldSchema(val *fastjson.Value, field client.FieldDescription) (any, error) { - switch field.Kind { - case client.FieldKind_DocID, client.FieldKind_STRING: - return getString(val) - - case client.FieldKind_STRING_ARRAY: - return getArray(val, getString) - - case client.FieldKind_NILLABLE_STRING_ARRAY: - return getNillableArray(val, getString) - - case client.FieldKind_BOOL: - return getBool(val) - - case client.FieldKind_BOOL_ARRAY: - return getArray(val, getBool) - - case client.FieldKind_NILLABLE_BOOL_ARRAY: - return getNillableArray(val, getBool) - - case client.FieldKind_FLOAT: - return getFloat64(val) - - case client.FieldKind_FLOAT_ARRAY: - return getArray(val, getFloat64) - - case client.FieldKind_NILLABLE_FLOAT_ARRAY: - return getNillableArray(val, getFloat64) - - case client.FieldKind_DATETIME: - // @TODO: Requires Typed Document refactor - // to handle this correctly. 
- // For now, we will persist DateTime as a - // RFC3339 string - // see https://github.com/sourcenetwork/defradb/issues/935 - return getString(val) - - case client.FieldKind_INT: - return getInt64(val) - - case client.FieldKind_INT_ARRAY: - return getArray(val, getInt64) - - case client.FieldKind_NILLABLE_INT_ARRAY: - return getNillableArray(val, getInt64) - - case client.FieldKind_FOREIGN_OBJECT, client.FieldKind_FOREIGN_OBJECT_ARRAY: - return nil, NewErrFieldOrAliasToFieldNotExist(field.Name) - - case client.FieldKind_BLOB: - return getString(val) - } - - return nil, client.NewErrUnhandledType("FieldKind", field.Kind) -} - -func getString(v *fastjson.Value) (string, error) { - b, err := v.StringBytes() - return string(b), err -} - -func getBool(v *fastjson.Value) (bool, error) { - return v.Bool() -} - -func getFloat64(v *fastjson.Value) (float64, error) { - return v.Float64() -} - -func getInt64(v *fastjson.Value) (int64, error) { - return v.Int64() -} - -func getArray[T any]( - val *fastjson.Value, - typeGetter func(*fastjson.Value) (T, error), -) ([]T, error) { - if val.Type() == fastjson.TypeNull { - return nil, nil - } - - valArray, err := val.Array() - if err != nil { - return nil, err - } - - arr := make([]T, len(valArray)) - for i, arrItem := range valArray { - if arrItem.Type() == fastjson.TypeNull { - continue - } - arr[i], err = typeGetter(arrItem) - if err != nil { - return nil, err - } - } - - return arr, nil -} - -func getNillableArray[T any]( - val *fastjson.Value, - typeGetter func(*fastjson.Value) (T, error), -) ([]*T, error) { - if val.Type() == fastjson.TypeNull { - return nil, nil - } - - valArray, err := val.Array() - if err != nil { - return nil, err - } - - arr := make([]*T, len(valArray)) - for i, arrItem := range valArray { - if arrItem.Type() == fastjson.TypeNull { - continue - } - v, err := typeGetter(arrItem) - if err != nil { - return nil, err - } - arr[i] = &v - } - - return arr, nil -} - // makeSelectionPlan constructs a simple read-only plan of the collection using the given filter. // currently it doesn't support any other operations other than filters. // (IE: No limit, order, etc) diff --git a/db/errors.go b/db/errors.go index ca5e09e107..db6a139b06 100644 --- a/db/errors.go +++ b/db/errors.go @@ -111,11 +111,6 @@ var ( ErrInvalidViewQuery = errors.New(errInvalidViewQuery) ) -// NewErrFieldOrAliasToFieldNotExist returns an error indicating that the given field or an alias field does not exist. -func NewErrFieldOrAliasToFieldNotExist(name string) error { - return errors.New(errFieldOrAliasToFieldNotExist, errors.NewKV("Name", name)) -} - // NewErrFailedToGetHeads returns a new error indicating that the heads of a document // could not be obtained. 
func NewErrFailedToGetHeads(inner error) error { diff --git a/db/fetcher/encoded_doc.go b/db/fetcher/encoded_doc.go index dc9291fb0d..e88ee80f9d 100644 --- a/db/fetcher/encoded_doc.go +++ b/db/fetcher/encoded_doc.go @@ -106,27 +106,25 @@ func (encdoc *encodedDocument) Reset() { } // Decode returns a properly decoded document object -func Decode(encdoc EncodedDocument) (*client.Document, error) { +func Decode(encdoc EncodedDocument, sd client.SchemaDescription) (*client.Document, error) { docID, err := client.NewDocIDFromString(string(encdoc.ID())) if err != nil { return nil, err } - doc := client.NewDocWithID(docID) + doc := client.NewDocWithID(docID, sd) properties, err := encdoc.Properties(false) if err != nil { return nil, err } for desc, val := range properties { - err = doc.SetAs(desc.Name, val, desc.Typ) + err = doc.Set(desc.Name, val) if err != nil { return nil, err } } - doc.SchemaVersionID = encdoc.SchemaVersionID() - // client.Document tracks which fields have been set ('dirtied'), here we // are simply decoding a clean document and the dirty flag is an artifact // of the current client.Document interface. diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go index 38309bf745..a820b78b30 100644 --- a/db/indexed_docs_test.go +++ b/db/indexed_docs_test.go @@ -15,9 +15,7 @@ import ( "encoding/json" "errors" "fmt" - "strconv" "testing" - "time" ipfsDatastore "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" @@ -55,22 +53,22 @@ func (f *indexTestFixture) saveDocToCollection(doc *client.Document, col client. require.NoError(f.t, err) } -func (f *indexTestFixture) newUserDoc(name string, age int) *client.Document { +func (f *indexTestFixture) newUserDoc(name string, age int, col client.Collection) *client.Document { d := userDoc{Name: name, Age: age, Weight: 154.1} data, err := json.Marshal(d) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data) + doc, err := client.NewDocFromJSON(data, col.Schema()) require.NoError(f.t, err) return doc } -func (f *indexTestFixture) newProdDoc(id int, price float64, cat string) *client.Document { +func (f *indexTestFixture) newProdDoc(id int, price float64, cat string, col client.Collection) *client.Document { d := productDoc{ID: id, Price: price, Category: cat} data, err := json.Marshal(d) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data) + doc, err := client.NewDocFromJSON(data, col.Schema()) require.NoError(f.t, err) return doc } @@ -256,7 +254,7 @@ func TestNonUnique_IfDocIsAdded_ShouldBeIndexed(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build() @@ -271,7 +269,7 @@ func TestNonUnique_IfFailsToStoredIndexedDoc_Error(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) key := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build() mockTxn := f.mockTxn() @@ -296,7 +294,7 @@ func TestNonUnique_IfDocDoesNotHaveIndexedField_SkipIndex(t *testing.T) { }{Age: 21, Weight: 154.1}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data) + doc, err := client.NewDocFromJSON(data, f.users.Schema()) require.NoError(f.t, err) err = f.users.Create(f.ctx, doc) @@ -312,7 +310,7 @@ func TestNonUnique_IfSystemStorageHasInvalidIndexDescription_Error(t *testing.T) 
defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) mockTxn := f.mockTxn().ClearSystemStore() systemStoreOn := mockTxn.MockSystemstore.EXPECT() @@ -328,7 +326,7 @@ func TestNonUnique_IfSystemStorageFailsToReadIndexDesc_Error(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) testErr := errors.New("test error") @@ -346,7 +344,7 @@ func TestNonUnique_IfIndexIntField_StoreIt(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnAge() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) key := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Doc(doc).Build() @@ -367,8 +365,8 @@ func TestNonUnique_IfMultipleCollectionsWithIndexes_StoreIndexWithCollectionID(t require.NoError(f.t, err) f.commitTxn() - userDoc := f.newUserDoc("John", 21) - prodDoc := f.newProdDoc(1, 3, "games") + userDoc := f.newUserDoc("John", 21, users) + prodDoc := f.newProdDoc(1, 3, "games", products) err = users.Create(f.ctx, userDoc) require.NoError(f.t, err) @@ -393,7 +391,7 @@ func TestNonUnique_IfMultipleIndexes_StoreIndexWithIndexID(t *testing.T) { f.createUserCollectionIndexOnName() f.createUserCollectionIndexOnAge() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) nameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Doc(doc).Build() @@ -407,92 +405,92 @@ func TestNonUnique_IfMultipleIndexes_StoreIndexWithIndexID(t *testing.T) { assert.Len(t, data, 0) } -func TestNonUnique_StoringIndexedFieldValueOfDifferentTypes(t *testing.T) { - f := newIndexTestFixtureBare(t) - - now := time.Now() - nowStr := now.Format(time.RFC3339) - - testCase := []struct { - Name string - FieldKind client.FieldKind - // FieldVal is the value the index will receive for serialization - FieldVal any - ShouldFail bool - }{ - {Name: "invalid int", FieldKind: client.FieldKind_INT, FieldVal: "invalid", ShouldFail: true}, - {Name: "invalid float", FieldKind: client.FieldKind_FLOAT, FieldVal: "invalid", ShouldFail: true}, - {Name: "invalid bool", FieldKind: client.FieldKind_BOOL, FieldVal: "invalid", ShouldFail: true}, - {Name: "invalid datetime", FieldKind: client.FieldKind_DATETIME, FieldVal: nowStr[1:], ShouldFail: true}, - {Name: "invalid datetime type", FieldKind: client.FieldKind_DATETIME, FieldVal: 1, ShouldFail: true}, - {Name: "invalid blob", FieldKind: client.FieldKind_BLOB, FieldVal: "invalid", ShouldFail: true}, - {Name: "invalid blob type", FieldKind: client.FieldKind_BLOB, FieldVal: 1, ShouldFail: true}, - - {Name: "valid int", FieldKind: client.FieldKind_INT, FieldVal: 12}, - {Name: "valid float", FieldKind: client.FieldKind_FLOAT, FieldVal: 36.654}, - {Name: "valid bool true", FieldKind: client.FieldKind_BOOL, FieldVal: true}, - {Name: "valid bool false", FieldKind: client.FieldKind_BOOL, FieldVal: false}, - {Name: "valid datetime string", FieldKind: client.FieldKind_DATETIME, FieldVal: nowStr}, - {Name: "valid empty string", FieldKind: client.FieldKind_STRING, FieldVal: ""}, - {Name: "valid blob type", FieldKind: client.FieldKind_BLOB, FieldVal: "00ff"}, - } - - for i, tc := range testCase { - _, err := f.db.AddSchema( - f.ctx, - fmt.Sprintf( - `type %s { - field: %s - }`, - "testTypeCol"+strconv.Itoa(i), - tc.FieldKind.String(), - ), - ) - require.NoError(f.t, err) - - collection, err := 
f.db.GetCollectionByName(f.ctx, "testTypeCol"+strconv.Itoa(i)) - require.NoError(f.t, err) - - f.txn, err = f.db.NewTxn(f.ctx, false) - require.NoError(f.t, err) - - indexDesc := client.IndexDescription{ - Fields: []client.IndexedFieldDescription{ - {Name: "field", Direction: client.Ascending}, - }, - } - - _, err = f.createCollectionIndexFor(collection.Name(), indexDesc) - require.NoError(f.t, err) - f.commitTxn() - - d := struct { - Field any `json:"field"` - }{Field: tc.FieldVal} - data, err := json.Marshal(d) - require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(data) - require.NoError(f.t, err) - - err = collection.Create(f.ctx, doc) - f.commitTxn() - if tc.ShouldFail { - require.ErrorIs(f.t, err, - NewErrInvalidFieldValue(tc.FieldKind, tc.FieldVal), "test case: %s", tc.Name) - } else { - assertMsg := fmt.Sprintf("test case: %s", tc.Name) - require.NoError(f.t, err, assertMsg) - - keyBuilder := newIndexKeyBuilder(f).Col(collection.Name()).Field("field").Doc(doc) - key := keyBuilder.Build() - - keyStr := key.ToDS() - data, err := f.txn.Datastore().Get(f.ctx, keyStr) - require.NoError(t, err, assertMsg) - assert.Len(t, data, 0, assertMsg) - } - } -} +// func TestNonUnique_StoringIndexedFieldValueOfDifferentTypes(t *testing.T) { +// f := newIndexTestFixtureBare(t) + +// now := time.Now() +// nowStr := now.Format(time.RFC3339) + +// testCase := []struct { +// Name string +// FieldKind client.FieldKind +// // FieldVal is the value the index will receive for serialization +// FieldVal any +// ShouldFail bool +// }{ +// {Name: "invalid int", FieldKind: client.FieldKind_INT, FieldVal: "invalid", ShouldFail: true}, +// {Name: "invalid float", FieldKind: client.FieldKind_FLOAT, FieldVal: "invalid", ShouldFail: true}, +// {Name: "invalid bool", FieldKind: client.FieldKind_BOOL, FieldVal: "invalid", ShouldFail: true}, +// {Name: "invalid datetime", FieldKind: client.FieldKind_DATETIME, FieldVal: nowStr[1:], ShouldFail: true}, +// {Name: "invalid datetime type", FieldKind: client.FieldKind_DATETIME, FieldVal: 1, ShouldFail: true}, +// {Name: "invalid blob", FieldKind: client.FieldKind_BLOB, FieldVal: "invalid", ShouldFail: true}, +// {Name: "invalid blob type", FieldKind: client.FieldKind_BLOB, FieldVal: 1, ShouldFail: true}, + +// {Name: "valid int", FieldKind: client.FieldKind_INT, FieldVal: 12}, +// {Name: "valid float", FieldKind: client.FieldKind_FLOAT, FieldVal: 36.654}, +// {Name: "valid bool true", FieldKind: client.FieldKind_BOOL, FieldVal: true}, +// {Name: "valid bool false", FieldKind: client.FieldKind_BOOL, FieldVal: false}, +// {Name: "valid datetime string", FieldKind: client.FieldKind_DATETIME, FieldVal: nowStr}, +// {Name: "valid empty string", FieldKind: client.FieldKind_STRING, FieldVal: ""}, +// {Name: "valid blob type", FieldKind: client.FieldKind_BLOB, FieldVal: "00ff"}, +// } + +// for i, tc := range testCase { +// _, err := f.db.AddSchema( +// f.ctx, +// fmt.Sprintf( +// `type %s { +// field: %s +// }`, +// "testTypeCol"+strconv.Itoa(i), +// tc.FieldKind.String(), +// ), +// ) +// require.NoError(f.t, err) + +// collection, err := f.db.GetCollectionByName(f.ctx, "testTypeCol"+strconv.Itoa(i)) +// require.NoError(f.t, err) + +// f.txn, err = f.db.NewTxn(f.ctx, false) +// require.NoError(f.t, err) + +// indexDesc := client.IndexDescription{ +// Fields: []client.IndexedFieldDescription{ +// {Name: "field", Direction: client.Ascending}, +// }, +// } + +// _, err = f.createCollectionIndexFor(collection.Name(), indexDesc) +// require.NoError(f.t, err) +// f.commitTxn() 
+ +// d := struct { +// Field any `json:"field"` +// }{Field: tc.FieldVal} +// data, err := json.Marshal(d) +// require.NoError(f.t, err) +// doc, err := client.NewDocFromJSON(data, collection.Schema()) +// require.NoError(f.t, err) + +// err = collection.Create(f.ctx, doc) +// f.commitTxn() +// if tc.ShouldFail { +// require.ErrorIs(f.t, err, +// NewErrInvalidFieldValue(tc.FieldKind, tc.FieldVal), "test case: %s", tc.Name) +// } else { +// assertMsg := fmt.Sprintf("test case: %s", tc.Name) +// require.NoError(f.t, err, assertMsg) + +// keyBuilder := newIndexKeyBuilder(f).Col(collection.Name()).Field("field").Doc(doc) +// key := keyBuilder.Build() + +// keyStr := key.ToDS() +// data, err := f.txn.Datastore().Get(f.ctx, keyStr) +// require.NoError(t, err, assertMsg) +// assert.Len(t, data, 0, assertMsg) +// } +// } +// } func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { f := newIndexTestFixture(t) @@ -504,7 +502,7 @@ func TestNonUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON) + doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -521,9 +519,9 @@ func TestNonUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() - doc1 := f.newUserDoc("John", 21) + doc1 := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc1, f.users) - doc2 := f.newUserDoc("Islam", 18) + doc2 := f.newUserDoc("Islam", 18, f.users) f.saveDocToCollection(doc2, f.users) f.createUserCollectionIndexOnName() @@ -596,7 +594,7 @@ func TestNonUniqueCreate_IfUponIndexingExistingDocsFetcherFails_ReturnError(t *t f := newIndexTestFixture(t) defer f.db.Close() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) f.users.(*collection).fetcherFactory = tc.PrepareFetcher @@ -614,7 +612,7 @@ func TestNonUniqueCreate_IfDatastoreFailsToStoreIndex_ReturnError(t *testing.T) f := newIndexTestFixture(t) defer f.db.Close() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) fieldKeyString := core.DataStoreKey{ @@ -645,15 +643,15 @@ func TestNonUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { require.NoError(f.t, err) f.commitTxn() - f.saveDocToCollection(f.newUserDoc("John", 21), users) - f.saveDocToCollection(f.newUserDoc("Islam", 23), users) + f.saveDocToCollection(f.newUserDoc("John", 21, users), users) + f.saveDocToCollection(f.newUserDoc("Islam", 23, users), users) products := f.getProductsCollectionDesc() _, err = f.createCollectionIndexFor(products.Name(), getProductsIndexDescOnCategory()) require.NoError(f.t, err) f.commitTxn() - f.saveDocToCollection(f.newProdDoc(1, 55, "games"), products) + f.saveDocToCollection(f.newProdDoc(1, 55, "games", products), products) userNameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Build() userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Build() @@ -695,7 +693,7 @@ func TestNonUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { }, } - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) for _, tc := range cases { @@ -721,7 +719,7 @@ func TestNonUniqueUpdate_IfFailsToReadIndexDescription_ReturnError(t *testing.T) defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := 
f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) err := doc.Set(usersNameFieldName, "Islam") @@ -810,7 +808,7 @@ func TestNonUniqueUpdate_IfFetcherFails_ReturnError(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) f.users.(*collection).fetcherFactory = tc.PrepareFetcher @@ -835,7 +833,7 @@ func TestNonUniqueUpdate_IfFailsToUpdateIndex_ReturnError(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnAge() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) f.commitTxn() @@ -877,7 +875,7 @@ func TestNonUniqueUpdate_ShouldPassToFetcherOnlyRelevantFields(t *testing.T) { }) return f } - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) err := doc.Set(usersNameFieldName, "Islam") @@ -918,7 +916,7 @@ func TestNonUniqueUpdate_IfDatastoreFails_ReturnError(t *testing.T) { defer f.db.Close() f.createUserCollectionIndexOnName() - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) err := doc.Set(usersNameFieldName, "Islam") require.NoError(t, err) @@ -955,7 +953,7 @@ func TestNonUpdate_IfIndexedFieldWasNil_ShouldDeleteIt(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON) + doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1014,9 +1012,9 @@ func TestUniqueCreate_ShouldIndexExistingDocs(t *testing.T) { f := newIndexTestFixture(t) defer f.db.Close() - doc1 := f.newUserDoc("John", 21) + doc1 := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc1, f.users) - doc2 := f.newUserDoc("Islam", 18) + doc2 := f.newUserDoc("Islam", 18, f.users) f.saveDocToCollection(doc2, f.users) f.createUserCollectionUniqueIndexOnName() @@ -1042,7 +1040,7 @@ func TestUnique_IfIndexedFieldIsNil_StoreItAsNil(t *testing.T) { }{Age: 44}) require.NoError(f.t, err) - doc, err := client.NewDocFromJSON(docJSON) + doc, err := client.NewDocFromJSON(docJSON, f.users.Schema()) require.NoError(f.t, err) f.saveDocToCollection(doc, f.users) @@ -1064,8 +1062,8 @@ func TestUniqueDrop_ShouldDeleteStoredIndexedFields(t *testing.T) { require.NoError(f.t, err) f.commitTxn() - f.saveDocToCollection(f.newUserDoc("John", 21), users) - f.saveDocToCollection(f.newUserDoc("Islam", 23), users) + f.saveDocToCollection(f.newUserDoc("John", 21, users), users) + f.saveDocToCollection(f.newUserDoc("Islam", 23, users), users) userNameKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersNameFieldName).Build() userAgeKey := newIndexKeyBuilder(f).Col(usersColName).Field(usersAgeFieldName).Build() @@ -1103,7 +1101,7 @@ func TestUniqueUpdate_ShouldDeleteOldValueAndStoreNewOne(t *testing.T) { }, } - doc := f.newUserDoc("John", 21) + doc := f.newUserDoc("John", 21, f.users) f.saveDocToCollection(doc, f.users) for _, tc := range cases { diff --git a/docs/data_format_changes/i2161-document-strong-typing.md b/docs/data_format_changes/i2161-document-strong-typing.md new file mode 100644 index 0000000000..918798e020 --- /dev/null +++ b/docs/data_format_changes/i2161-document-strong-typing.md @@ -0,0 +1,3 @@ +# Add strong typing to document creation + +Since we now inforce type safety in the document creation, some of the fields in our tests now marshal to a different types and this is causing CIDs and docIDs to change. 
\ No newline at end of file diff --git a/http/client_collection.go b/http/client_collection.go index 36b99cd9f2..95a81df84f 100644 --- a/http/client_collection.go +++ b/http/client_collection.go @@ -62,13 +62,6 @@ func (c *Collection) Definition() client.CollectionDefinition { func (c *Collection) Create(ctx context.Context, doc *client.Document) error { methodURL := c.http.baseURL.JoinPath("collections", c.Description().Name) - // We must call this here, else the docID on the given object will not match - // that of the document saved in the database - err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) - if err != nil { - return err - } - body, err := doc.String() if err != nil { return err @@ -90,13 +83,6 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er var docMapList []json.RawMessage for _, doc := range docs { - // We must call this here, else the docID on the given object will not match - // that of the document saved in the database - err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) - if err != nil { - return err - } - docMap, err := doc.ToJSONPatch() if err != nil { return err @@ -313,11 +299,12 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo if err != nil { return nil, err } - var docMap map[string]any - if err := c.http.requestJson(req, &docMap); err != nil { + data, err := c.http.request(req) + if err != nil { return nil, err } - doc, err := client.NewDocFromMap(docMap) + doc := client.NewDocWithID(docID, c.def.Schema) + err = doc.SetWithJSON(data) if err != nil { return nil, err } diff --git a/http/handler_ccip_test.go b/http/handler_ccip_test.go index 66ac173a54..c0df7e6a26 100644 --- a/http/handler_ccip_test.go +++ b/http/handler_ccip_test.go @@ -203,7 +203,7 @@ func setupDatabase(t *testing.T) client.DB { col, err := cdb.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "bob"}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) diff --git a/http/handler_collection.go b/http/handler_collection.go index 87a47e1ad2..d5b4ca04f3 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -56,7 +56,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) return } - doc, err := client.NewDocFromMap(docMap) + doc, err := client.NewDocFromMap(docMap, col.Schema()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -69,7 +69,7 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { } rw.WriteHeader(http.StatusOK) case map[string]any: - doc, err := client.NewDocFromMap(t) + doc, err := client.NewDocFromMap(t, col.Schema()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return diff --git a/net/client_test.go b/net/client_test.go index 7eba460b95..5db18c4a07 100644 --- a/net/client_test.go +++ b/net/client_test.go @@ -22,12 +22,25 @@ import ( "github.com/sourcenetwork/defradb/events" ) +var sd = client.SchemaDescription{ + Name: "test", + Fields: []client.FieldDescription{ + { + Name: "test", + Kind: client.FieldKind_STRING, + Typ: client.LWW_REGISTER, + }, + }, +} + func TestPushlogWithDialFailure(t *testing.T) { ctx := context.Background() _, n := newTestNode(ctx, t) defer n.Close() - doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`)) + doc, err := 
client.NewDocFromJSON([]byte(`{"test": "test"}`), sd) + require.NoError(t, err) + id, err := doc.GenerateDocID() require.NoError(t, err) cid, err := createCID(doc) @@ -40,7 +53,7 @@ func TestPushlogWithDialFailure(t *testing.T) { ) err = n.server.pushLog(ctx, events.Update{ - DocID: doc.ID().String(), + DocID: id.String(), Cid: cid, SchemaRoot: "test", Block: &EmptyNode{}, @@ -54,14 +67,16 @@ func TestPushlogWithInvalidPeerID(t *testing.T) { _, n := newTestNode(ctx, t) defer n.Close() - doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`)) + doc, err := client.NewDocFromJSON([]byte(`{"test": "test"}`), sd) + require.NoError(t, err) + id, err := doc.GenerateDocID() require.NoError(t, err) cid, err := createCID(doc) require.NoError(t, err) err = n.server.pushLog(ctx, events.Update{ - DocID: doc.ID().String(), + DocID: id.String(), Cid: cid, SchemaRoot: "test", Block: &EmptyNode{}, @@ -92,11 +107,12 @@ func TestPushlogW_WithValidPeerID_NoError(t *testing.T) { }`) require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`)) + col, err := n1.db.GetCollectionByName(ctx, "User") require.NoError(t, err) - col, err := n1.db.GetCollectionByName(ctx, "User") + doc, err := client.NewDocFromJSON([]byte(`{"name": "test"}`), col.Schema()) require.NoError(t, err) + err = col.Save(ctx, doc) require.NoError(t, err) diff --git a/net/dag_test.go b/net/dag_test.go index fc46b6a96c..524847bfb8 100644 --- a/net/dag_test.go +++ b/net/dag_test.go @@ -60,7 +60,10 @@ func TestSendJobWorker_WithNewJob_NoError(t *testing.T) { }`) require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col, err := db.GetCollectionByName(ctx, "User") + require.NoError(t, err) + + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -101,7 +104,10 @@ func TestSendJobWorker_WithCloseJob_NoError(t *testing.T) { }`) require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + col, err := db.GetCollectionByName(ctx, "User") + require.NoError(t, err) + + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) @@ -162,7 +168,7 @@ func TestSendJobWorker_WithPeer_NoError(t *testing.T) { col, err := db1.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) dsKey := core.DataStoreKeyFromDocID(doc.ID()) diff --git a/net/peer_test.go b/net/peer_test.go index 780ae74e35..139e160155 100644 --- a/net/peer_test.go +++ b/net/peer_test.go @@ -170,7 +170,7 @@ func TestNewPeer_WithExistingTopic_TopicAlreadyExistsError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -335,7 +335,7 @@ func TestRegisterNewDocument_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) cid, 
err := createCID(doc) @@ -359,7 +359,7 @@ func TestRegisterNewDocument_RPCTopicAlreadyRegisteredError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) _, err = rpc.NewTopic(ctx, n.Peer.ps, n.Peer.host.ID(), doc.ID().String(), true) @@ -476,7 +476,7 @@ func TestPushToReplicator_SingleDocumentNoPeer_FailedToReplicateLogError(t *test col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -793,7 +793,7 @@ func TestHandleDocCreateLog_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -846,7 +846,7 @@ func TestHandleDocCreateLog_WithExistingTopic_TopicExistsError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -876,7 +876,7 @@ func TestHandleDocUpdateLog_NoError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -929,7 +929,7 @@ func TestHandleDocUpdateLog_WithExistingDocIDTopic_TopicExistsError(t *testing.T col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -973,7 +973,7 @@ func TestHandleDocUpdateLog_WithExistingSchemaTopic_TopicExistsError(t *testing. 
col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) diff --git a/net/server_test.go b/net/server_test.go index 521a3b7634..5606dc3dc7 100644 --- a/net/server_test.go +++ b/net/server_test.go @@ -131,7 +131,7 @@ func TestNewServerWithAddTopicError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -177,7 +177,7 @@ func TestNewServerWithEmitterError(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) err = col.Create(ctx, doc) @@ -260,7 +260,7 @@ func TestPushLog(t *testing.T) { col, err := db.GetCollectionByName(ctx, "User") require.NoError(t, err) - doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`)) + doc, err := client.NewDocFromJSON([]byte(`{"name": "John", "age": 30}`), col.Schema()) require.NoError(t, err) cid, err := createCID(doc) diff --git a/planner/create.go b/planner/create.go index c8c48b658d..e272c80722 100644 --- a/planner/create.go +++ b/planner/create.go @@ -59,7 +59,7 @@ func (n *createNode) Kind() string { return "createNode" } func (n *createNode) Init() error { return nil } func (n *createNode) Start() error { - doc, err := client.NewDocFromJSON([]byte(n.newDocStr)) + doc, err := client.NewDocFromJSON([]byte(n.newDocStr), n.collection.Schema()) if err != nil { n.err = err return err diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index 7829d5e450..5ab76ff726 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -74,7 +74,7 @@ var ( client.FieldKind_STRING_ARRAY: client.LWW_REGISTER, client.FieldKind_NILLABLE_STRING_ARRAY: client.LWW_REGISTER, client.FieldKind_BLOB: client.LWW_REGISTER, - client.FieldKind_FOREIGN_OBJECT: client.NONE_CRDT, + client.FieldKind_FOREIGN_OBJECT: client.LWW_REGISTER, client.FieldKind_FOREIGN_OBJECT_ARRAY: client.NONE_CRDT, } ) diff --git a/tests/bench/bench_util.go b/tests/bench/bench_util.go index fda850e9a9..d7c00bd664 100644 --- a/tests/bench/bench_util.go +++ b/tests/bench/bench_util.go @@ -161,7 +161,7 @@ func BackfillBenchmarkDB( // create the documents docIDs := make([]client.DocID, numTypes) for j := 0; j < numTypes; j++ { - doc, err := client.NewDocFromJSON([]byte(docs[j])) + doc, err := client.NewDocFromJSON([]byte(docs[j]), cols[j].Schema()) if err != nil { errCh <- errors.Wrap("failed to create document from fixture", err) return diff --git a/tests/bench/collection/utils.go b/tests/bench/collection/utils.go index 2ef7123493..a1bed37d3a 100644 --- a/tests/bench/collection/utils.go +++ b/tests/bench/collection/utils.go @@ -170,7 +170,7 @@ func runCollectionBenchCreateMany( docs := make([]*client.Document, opCount) for j := 0; j < opCount; j++ { d, _ := fixture.GenerateDocs() - docs[j], _ = client.NewDocFromJSON([]byte(d[0])) + docs[j], _ = client.NewDocFromJSON([]byte(d[0]), collections[0].Schema()) } 
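Where no database is available, the net tests earlier in this patch build a `client.SchemaDescription` by hand instead of fetching one from a collection. A sketch of that pattern, assuming a single LWW-registered string field is all the throwaway document needs:

```go
package example

import "github.com/sourcenetwork/defradb/client"

// testSchema mirrors the hand-rolled description in net/client_test.go above:
// one LWW-registered string field is enough for NewDocFromJSON to validate
// `{"test": "test"}` without a running database.
var testSchema = client.SchemaDescription{
	Name: "test",
	Fields: []client.FieldDescription{
		{
			Name: "test",
			Kind: client.FieldKind_STRING,
			Typ:  client.LWW_REGISTER,
		},
	},
}

// newTestDoc builds a document against the hand-rolled schema.
func newTestDoc() (*client.Document, error) {
	return client.NewDocFromJSON([]byte(`{"test": "test"}`), testSchema)
}
```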
collections[0].CreateMany(ctx, docs) //nolint:errcheck @@ -193,7 +193,7 @@ func runCollectionBenchCreateSync(b *testing.B, for j := 0; j < runs; j++ { docs, _ := fixture.GenerateDocs() for k := 0; k < numTypes; k++ { - doc, _ := client.NewDocFromJSON([]byte(docs[k])) + doc, _ := client.NewDocFromJSON([]byte(docs[k]), collections[k].Schema()) collections[k].Create(ctx, doc) //nolint:errcheck } } @@ -232,7 +232,7 @@ func runCollectionBenchCreateAsync(b *testing.B, docs, _ := fixture.GenerateDocs() // create the documents for j := 0; j < numTypes; j++ { - doc, _ := client.NewDocFromJSON([]byte(docs[j])) + doc, _ := client.NewDocFromJSON([]byte(docs[j]), collections[j].Schema()) collections[j].Create(ctx, doc) //nolint:errcheck } diff --git a/tests/clients/cli/wrapper_collection.go b/tests/clients/cli/wrapper_collection.go index abef339cfd..8295bad8d7 100644 --- a/tests/clients/cli/wrapper_collection.go +++ b/tests/clients/cli/wrapper_collection.go @@ -58,12 +58,6 @@ func (c *Collection) Create(ctx context.Context, doc *client.Document) error { args := []string{"client", "collection", "create"} args = append(args, "--name", c.Description().Name) - // We must call this here, else the docID on the given object will not match - // that of the document saved in the database - err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) - if err != nil { - return err - } document, err := doc.String() if err != nil { return err @@ -84,12 +78,6 @@ func (c *Collection) CreateMany(ctx context.Context, docs []*client.Document) er docMapList := make([]map[string]any, len(docs)) for i, doc := range docs { - // We must call this here, else the docID on the given object will not match - // that of the document saved in the database - err := doc.RemapAliasFieldsAndDocID(c.Schema().Fields) - if err != nil { - return err - } docMap, err := doc.ToMap() if err != nil { return err @@ -310,11 +298,13 @@ func (c *Collection) Get(ctx context.Context, docID client.DocID, showDeleted bo if err != nil { return nil, err } - var docMap map[string]any - if err := json.Unmarshal(data, &docMap); err != nil { + doc := client.NewDocWithID(docID, c.Schema()) + err = doc.SetWithJSON(data) + if err != nil { return nil, err } - return client.NewDocFromMap(docMap) + doc.Clean() + return doc, nil } func (c *Collection) WithTxn(tx datastore.Txn) client.Collection { diff --git a/tests/gen/gen_auto.go b/tests/gen/gen_auto.go index c425c8de8f..7ad3bb2d41 100644 --- a/tests/gen/gen_auto.go +++ b/tests/gen/gen_auto.go @@ -12,6 +12,7 @@ package gen import ( "math/rand" + "strings" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" @@ -139,14 +140,18 @@ func (g *randomDocGenerator) generateRandomDocs(order []string) error { } if field.IsRelation() { if field.IsPrimaryRelation() { - newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocID(typeName, &field) + if strings.HasSuffix(field.Name, request.RelatedObjectID) { + newDoc[field.Name] = g.getNextPrimaryDocID(typeName, &field) + } else { + newDoc[field.Name+request.RelatedObjectID] = g.getNextPrimaryDocID(typeName, &field) + } } } else { fieldConf := g.configurator.config.ForField(typeName, field.Name) newDoc[field.Name] = g.generateRandomValue(typeName, field.Kind, fieldConf) } } - doc, err := client.NewDocFromMap(newDoc) + doc, err := client.NewDocFromMap(newDoc, typeDef.Schema) if err != nil { return err } diff --git a/tests/gen/gen_auto_test.go b/tests/gen/gen_auto_test.go index a9a8d81136..f22859df0c 100644 --- a/tests/gen/gen_auto_test.go 
+++ b/tests/gen/gen_auto_test.go @@ -1352,12 +1352,9 @@ func TestAutoGenerate_IfColDefinitionsAreValid_ShouldGenerate(t *testing.T) { Kind: client.FieldKind_STRING, }, { - Name: "owner", - Kind: client.FieldKind_FOREIGN_OBJECT, - Schema: "User", - RelationType: client.Relation_Type_ONE | - client.Relation_Type_ONEMANY | - client.Relation_Type_Primary, + Name: "owner_id", + Kind: client.FieldKind_DocID, + RelationType: client.Relation_Type_INTERNAL_ID, }, }, }, diff --git a/tests/integration/collection/update/simple/utils.go b/tests/integration/collection/update/simple/utils.go index 44266bc9b1..c5e73f7e97 100644 --- a/tests/integration/collection/update/simple/utils.go +++ b/tests/integration/collection/update/simple/utils.go @@ -13,18 +13,30 @@ package update import ( "testing" - testUtils "github.com/sourcenetwork/defradb/tests/integration/collection" + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + testUtilsCol "github.com/sourcenetwork/defradb/tests/integration/collection" ) -var userCollectionGQLSchema = (` +var userCollectionGQLSchema = ` type Users { name: String age: Int heightM: Float verified: Boolean } -`) +` -func executeTestCase(t *testing.T, test testUtils.TestCase) { - testUtils.ExecuteRequestTestCase(t, userCollectionGQLSchema, test) +var colDefMap = make(map[string]client.CollectionDefinition) + +func init() { + c, err := testUtils.ParseSDL(userCollectionGQLSchema) + if err != nil { + panic(err) + } + colDefMap = c +} + +func executeTestCase(t *testing.T, test testUtilsCol.TestCase) { + testUtilsCol.ExecuteRequestTestCase(t, userCollectionGQLSchema, test) } diff --git a/tests/integration/collection/update/simple/with_doc_id_test.go b/tests/integration/collection/update/simple/with_doc_id_test.go index 228438b58b..6f990f7e70 100644 --- a/tests/integration/collection/update/simple/with_doc_id_test.go +++ b/tests/integration/collection/update/simple/with_doc_id_test.go @@ -26,7 +26,7 @@ func TestUpdateWithDocID(t *testing.T) { "age": 21 }` - doc, err := client.NewDocFromJSON([]byte(docStr)) + doc, err := client.NewDocFromJSON([]byte(docStr), colDefMap["Users"].Schema) if err != nil { assert.Fail(t, err.Error()) } diff --git a/tests/integration/collection/update/simple/with_doc_ids_test.go b/tests/integration/collection/update/simple/with_doc_ids_test.go index f32818db39..a78fa2cc29 100644 --- a/tests/integration/collection/update/simple/with_doc_ids_test.go +++ b/tests/integration/collection/update/simple/with_doc_ids_test.go @@ -26,7 +26,7 @@ func TestUpdateWithDocIDs(t *testing.T) { "age": 21 }` - doc1, err := client.NewDocFromJSON([]byte(docStr1)) + doc1, err := client.NewDocFromJSON([]byte(docStr1), colDefMap["Users"].Schema) if err != nil { assert.Fail(t, err.Error()) } @@ -36,7 +36,7 @@ func TestUpdateWithDocIDs(t *testing.T) { "age": 32 }` - doc2, err := client.NewDocFromJSON([]byte(docStr2)) + doc2, err := client.NewDocFromJSON([]byte(docStr2), colDefMap["Users"].Schema) if err != nil { assert.Fail(t, err.Error()) } diff --git a/tests/integration/collection/update/simple/with_filter_test.go b/tests/integration/collection/update/simple/with_filter_test.go index de2d24f8e2..1dc10b8de8 100644 --- a/tests/integration/collection/update/simple/with_filter_test.go +++ b/tests/integration/collection/update/simple/with_filter_test.go @@ -70,7 +70,7 @@ func TestUpdateWithFilter(t *testing.T) { "age": 21 }` - doc, err := client.NewDocFromJSON([]byte(docStr)) + doc, err := client.NewDocFromJSON([]byte(docStr), 
colDefMap["Users"].Schema) if err != nil { assert.Fail(t, err.Error()) } diff --git a/tests/integration/collection/utils.go b/tests/integration/collection/utils.go index 5053b65409..b8bf1cf46b 100644 --- a/tests/integration/collection/utils.go +++ b/tests/integration/collection/utils.go @@ -86,7 +86,7 @@ func setupDatabase( } for _, docStr := range docs { - doc, err := client.NewDocFromJSON([]byte(docStr)) + doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) if assertError(t, testCase.Description, err, testCase.ExpectedError) { return } diff --git a/tests/integration/events/simple/utils.go b/tests/integration/events/simple/utils.go index 689f5557f5..199a2a7c07 100644 --- a/tests/integration/events/simple/utils.go +++ b/tests/integration/events/simple/utils.go @@ -13,7 +13,9 @@ package simple import ( "testing" - testUtils "github.com/sourcenetwork/defradb/tests/integration/events" + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" + testUtilsEvt "github.com/sourcenetwork/defradb/tests/integration/events" ) var schema = ` @@ -22,6 +24,16 @@ var schema = ` } ` -func executeTestCase(t *testing.T, test testUtils.TestCase) { - testUtils.ExecuteRequestTestCase(t, schema, test) +var colDefMap = make(map[string]client.CollectionDefinition) + +func init() { + c, err := testUtils.ParseSDL(schema) + if err != nil { + panic(err) + } + colDefMap = c +} + +func executeTestCase(t *testing.T, test testUtilsEvt.TestCase) { + testUtilsEvt.ExecuteRequestTestCase(t, schema, test) } diff --git a/tests/integration/events/simple/with_create_test.go b/tests/integration/events/simple/with_create_test.go index 0c780c8fde..ec5c174106 100644 --- a/tests/integration/events/simple/with_create_test.go +++ b/tests/integration/events/simple/with_create_test.go @@ -28,6 +28,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { "name": "John" }`, ), + colDefMap["Users"].Schema, ) assert.Nil(t, err) docID1 := doc1.ID().String() @@ -38,6 +39,7 @@ func TestEventsSimpleWithCreate(t *testing.T) { "name": "Shahzad" }`, ), + colDefMap["Users"].Schema, ) assert.Nil(t, err) docID2 := doc2.ID().String() diff --git a/tests/integration/events/simple/with_delete_test.go b/tests/integration/events/simple/with_delete_test.go index df811cd648..b02b2505e1 100644 --- a/tests/integration/events/simple/with_delete_test.go +++ b/tests/integration/events/simple/with_delete_test.go @@ -28,6 +28,7 @@ func TestEventsSimpleWithDelete(t *testing.T) { "name": "John" }`, ), + colDefMap["Users"].Schema, ) assert.Nil(t, err) docID1 := doc1.ID().String() diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 30b8cab9a4..8e91ac231e 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -28,6 +28,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { "name": "John" }`, ), + colDefMap["Users"].Schema, ) assert.Nil(t, err) docID1 := doc1.ID().String() @@ -38,6 +39,7 @@ func TestEventsSimpleWithUpdate(t *testing.T) { "name": "Shahzad" }`, ), + colDefMap["Users"].Schema, ) assert.Nil(t, err) docID2 := doc2.ID().String() diff --git a/tests/integration/events/utils.go b/tests/integration/events/utils.go index 30b65bc189..d2bf418294 100644 --- a/tests/integration/events/utils.go +++ b/tests/integration/events/utils.go @@ -149,7 +149,7 @@ func setupDatabase( require.NoError(t, err) for _, docStr := range docs { - doc, err := 
client.NewDocFromJSON([]byte(docStr)) + doc, err := client.NewDocFromJSON([]byte(docStr), col.Schema()) require.NoError(t, err) err = col.Save(ctx, doc) diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go index 946c081929..accf929402 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go @@ -48,7 +48,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", }, }, }, @@ -61,7 +61,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. }`, Results: []map[string]any{ { - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", }, }, }, @@ -83,7 +83,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": map[string]any{ - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", "name": "Book By Website", }, }, @@ -119,7 +119,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", "name": "Online", "published": map[string]any{ - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", "name": "Book By Online", }, }, @@ -146,7 +146,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", "name": "Book By Website", "publisher": map[string]any{ "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", @@ -155,7 +155,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. 
}, { - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", "name": "Book By Online", "publisher": map[string]any{ "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", @@ -200,7 +200,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", }, }, }, @@ -213,7 +213,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }`, Results: []map[string]any{ { - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", }, }, }, @@ -232,7 +232,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", "name": "Book By Website", "publisher": map[string]any{ "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", @@ -256,7 +256,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing }`, Results: []map[string]any{ { - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", "name": "Book By Online", "publisher": map[string]any{ "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", @@ -289,7 +289,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": map[string]any{ - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", "name": "Book By Website", }, }, @@ -298,7 +298,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing "_docID": "bae-8a381044-9206-51e7-8bc8-dc683d5f2523", "name": "Online", "published": map[string]any{ - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", "name": "Book By Online", }, }, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go index 4d75d3b916..bee050d1ae 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_many/with_show_deleted_test.go @@ -20,12 +20,28 @@ import ( testUtils "github.com/sourcenetwork/defradb/tests/integration" ) +var schemas = ` +type Book { + name: String + rating: Float + author: Author +} +type Author { + name: String + age: Int + published: [Book] +} +` + func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *testing.T) { + colDefMap, err := testUtils.ParseSDL(schemas) + require.NoError(t, err) + jsonString1 := `{ "name": "John", "age": 30 }` - doc1, err := client.NewDocFromJSON([]byte(jsonString1)) + doc1, err := client.NewDocFromJSON([]byte(jsonString1), colDefMap["Author"].Schema) require.NoError(t, err) jsonString2 := fmt.Sprintf(`{ @@ -33,7 +49,7 @@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test "rating": 9.9, "author_id": "%s" }`, doc1.ID()) - doc2, err := client.NewDocFromJSON([]byte(jsonString2)) + doc2, err := client.NewDocFromJSON([]byte(jsonString2), colDefMap["Book"].Schema) require.NoError(t, err) jsonString3 := fmt.Sprintf(`{ @@ -48,18 +64,7 
@@ func TestDeletionOfADocumentUsingSingleDocIDWithShowDeletedDocumentQuery(t *test Description: "One to many delete document using single document id, show deleted.", Actions: []any{ testUtils.SchemaUpdate{ - Schema: ` - type Book { - name: String - rating: Float - author: Author - } - type Author { - name: String - age: Int - published: [Book] - } - `, + Schema: schemas, }, testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go index 6447551393..4eed71eeb4 100644 --- a/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go +++ b/tests/integration/mutation/delete/field_kinds/one_to_one_to_one/with_txn_test.go @@ -25,7 +25,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -45,13 +45,13 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideForwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + delete_Book(docID: "bae-37de3681-1856-5bc9-9fd6-1595647b7d96") { _docID } }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", }, }, }, @@ -91,7 +91,7 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -111,13 +111,13 @@ func TestTxnDeletionOfRelatedDocFromPrimarySideBackwardDirection(t *testing.T) { // Delete a linked book that exists. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + delete_Book(docID: "bae-37de3681-1856-5bc9-9fd6-1595647b7d96") { _docID } }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", }, }, }, @@ -151,7 +151,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -171,13 +171,13 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes // Delete a linked book that exists. 
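The fixtures above now derive their collection definitions from the same SDL string they register, via the `testUtils.ParseSDL` helper that returns definitions keyed by type name. A minimal sketch of that pattern, with the schema string and function name as placeholders:

```go
package example

import (
	"github.com/sourcenetwork/defradb/client"
	testUtils "github.com/sourcenetwork/defradb/tests/integration"
)

const schema = `
type Users {
	name: String
}
`

// docForUsers parses the SDL and reuses the resulting definition, keeping
// the test's registered schema and its document typing in sync.
func docForUsers(json []byte) (*client.Document, error) {
	colDefMap, err := testUtils.ParseSDL(schema)
	if err != nil {
		return nil, err
	}
	return client.NewDocFromJSON(json, colDefMap["Users"].Schema)
}
```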
TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + delete_Book(docID: "bae-37de3681-1856-5bc9-9fd6-1595647b7d96") { _docID } }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", }, }, }, @@ -199,7 +199,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnForwardDirection(t *tes "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", "name": "Website", "published": map[string]any{ - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", "name": "Book By Website", }, }, @@ -241,7 +241,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + // "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", Doc: `{ "name": "Book By Website", "rating": 4.0, @@ -261,13 +261,13 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te // Delete a linked book that exists in transaction 0. TransactionID: immutable.Some(0), Request: `mutation { - delete_Book(docID: "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722") { + delete_Book(docID: "bae-37de3681-1856-5bc9-9fd6-1595647b7d96") { _docID } }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", }, }, }, @@ -286,7 +286,7 @@ func TestATxnCanReadARecordThatIsDeletedInANonCommitedTxnBackwardDirection(t *te }`, Results: []map[string]any{ { - "_docID": "bae-5b16ccd7-9cae-5145-a56c-03cfe7787722", + "_docID": "bae-37de3681-1856-5bc9-9fd6-1595647b7d96", "name": "Book By Website", "publisher": map[string]any{ "_docID": "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4", @@ -325,7 +325,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideForwardDirection(t *testing.T) testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + // "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -386,7 +386,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T testUtils.CreateDoc{ // books CollectionID: 0, - // "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + // "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", Doc: `{ "name": "Book By Online", "rating": 4.0, @@ -434,7 +434,7 @@ func TestTxnDeletionOfRelatedDocFromNonPrimarySideBackwardDirection(t *testing.T }`, Results: []map[string]any{ { - "_docID": "bae-edf7f0fc-f0fd-57e2-b695-569d87e1b251", + "_docID": "bae-60ffc9b4-0e31-5d63-82dc-c5cb007f2985", "name": "Book By Online", "publisher": nil, }, diff --git a/tests/integration/mutation/update/field_kinds/date_time_test.go b/tests/integration/mutation/update/field_kinds/date_time_test.go index 3a79a2c1e0..46dddaffa0 100644 --- a/tests/integration/mutation/update/field_kinds/date_time_test.go +++ b/tests/integration/mutation/update/field_kinds/date_time_test.go @@ -31,12 +31,12 @@ func TestMutationUpdate_WithDateTimeField(t *testing.T) { testUtils.CreateDoc{ Doc: `{ "name": "John", - "created_at": "2011-07-23T01:11:11.111Z" + "created_at": "2011-07-23T01:11:11-05:00" }`, }, testUtils.UpdateDoc{ Doc: `{ - "created_at": "2021-07-23T02:22:22.222Z" + "created_at": "2021-07-23T02:22:22-05:00" }`, }, testUtils.Request{ @@ -49,7 +49,7 @@ func TestMutationUpdate_WithDateTimeField(t 
*testing.T) { `, Results: []map[string]any{ { - "created_at": "2021-07-23T02:22:22.222Z", + "created_at": testUtils.MustParseTime("2021-07-23T02:22:22-05:00"), }, }, }, @@ -74,30 +74,30 @@ func TestMutationUpdate_WithDateTimeField_MultipleDocs(t *testing.T) { testUtils.CreateDoc{ Doc: `{ "name": "John", - "created_at": "2011-07-23T01:11:11.111Z" + "created_at": "2011-07-23T01:11:11-05:00" }`, }, testUtils.CreateDoc{ Doc: `{ "name": "Fred", - "created_at": "2021-07-23T02:22:22.222Z" + "created_at": "2021-07-23T02:22:22-05:00" }`, }, testUtils.Request{ Request: `mutation { - update_Users(data: "{\"created_at\": \"2031-07-23T03:23:23.333Z\"}") { + update_Users(data: "{\"created_at\": \"2031-07-23T03:23:23Z\"}") { name created_at } }`, Results: []map[string]any{ { - "name": "John", - "created_at": "2031-07-23T03:23:23.333Z", + "name": "Fred", + "created_at": testUtils.MustParseTime("2031-07-23T03:23:23Z"), }, { - "name": "Fred", - "created_at": "2031-07-23T03:23:23.333Z", + "name": "John", + "created_at": testUtils.MustParseTime("2031-07-23T03:23:23Z"), }, }, }, diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 576b089d1c..6f4373976f 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -65,7 +65,7 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromSingleSide_Collectio }`, bookID, ), - ExpectedError: "The given field does not exist. Name: published", + ExpectedError: "The given field or alias to field does not exist. Name: published", }, }, } @@ -134,12 +134,6 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", - // This restiction is temporary due to a bug in the collection api, see - // TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection - // and https://github.com/sourcenetwork/defradb/issues/1703 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -207,20 +201,12 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL( // Note: This test should probably not pass, as it contains a // reference to a document that doesnt exist. -// -// This test also documents a bug in the collection api, see: -// TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_GQL -// and https://github.com/sourcenetwork/defradb/issues/1703 for more info. func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection(t *testing.T) { author1ID := "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" invalidAuthorID := "bae-35953ca-518d-9e6b-9ce6cd00eff5" test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.CollectionNamedMutationType, - testUtils.CollectionSaveMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -247,7 +233,22 @@ func TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Coll }`, invalidAuthorID, ), - ExpectedError: "The given field does not exist. 
Name: author", + }, + testUtils.Request{ + Request: `query { + Book { + name + author { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Painted House", + "author": nil, + }, + }, }, }, } @@ -261,12 +262,6 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side, with a wrong field.", - // This restiction is temporary due to a bug in the collection api, see - // TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection - // and https://github.com/sourcenetwork/defradb/issues/1703 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -314,12 +309,6 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySide(t *testing. test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side", - // This restiction is temporary due to a bug in the collection api, see - // TestMutationUpdateOneToMany_InvalidAliasRelationNameToLinkFromManySide_Collection - // and https://github.com/sourcenetwork/defradb/issues/1703 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index c68dcce5a3..67d5f0b38c 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -25,11 +25,6 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromPrimarySide(t *testin test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from single side", - // This restiction is temporary due to a bug in the collection api, see - // https://github.com/sourcenetwork/defradb/issues/1703 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -76,11 +71,6 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySide(t *test test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", - // This restiction is temporary due to a bug in the collection api, see - // https://github.com/sourcenetwork/defradb/issues/1703 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -128,11 +118,6 @@ func TestMutationUpdateOneToOne_AliasWithInvalidLengthRelationIDToLink_Error(t * test := testUtils.TestCase{ Description: "One to one update mutation using invalid alias relation id", - // This restiction is temporary due to a bug in the collection api, see - // https://github.com/sourcenetwork/defradb/issues/1703 for more info. 
- SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -173,11 +158,6 @@ func TestMutationUpdateOneToOne_InvalidAliasRelationNameToLinkFromSecondarySide_ test := testUtils.TestCase{ Description: "One to one update mutation using alias relation id from secondary side", - // This restiction is temporary due to a bug in the collection api, see - // https://github.com/sourcenetwork/defradb/issues/1703 for more info. - SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ - testUtils.GQLRequestMutationType, - }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/update/with_filter_test.go b/tests/integration/mutation/update/with_filter_test.go index 455ff99bbf..d7b3ae9dde 100644 --- a/tests/integration/mutation/update/with_filter_test.go +++ b/tests/integration/mutation/update/with_filter_test.go @@ -95,11 +95,11 @@ func TestMutationUpdate_WithBooleanFilter(t *testing.T) { }`, Results: []map[string]any{ { - "name": "Fred", + "name": "John", "points": float64(59), }, { - "name": "John", + "name": "Fred", "points": float64(59), }, }, diff --git a/tests/integration/mutation/update/with_ids_test.go b/tests/integration/mutation/update/with_ids_test.go index 8d7a4aa6f0..d1d7645829 100644 --- a/tests/integration/mutation/update/with_ids_test.go +++ b/tests/integration/mutation/update/with_ids_test.go @@ -42,7 +42,7 @@ func TestMutationUpdate_WithIds(t *testing.T) { }`, }, testUtils.CreateDoc{ - // bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901 + // bae-3ac659d1-521a-5eba-a833-5c58b151ca72 Doc: `{ "name": "Fred", "points": 33 @@ -51,7 +51,7 @@ func TestMutationUpdate_WithIds(t *testing.T) { testUtils.Request{ Request: `mutation { update_Users( - docIDs: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", "bae-4a99afc4-a70b-5702-9642-fc1eb9ffe901"], + docIDs: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", "bae-3ac659d1-521a-5eba-a833-5c58b151ca72"], data: "{\"points\": 59}" ) { name diff --git a/tests/integration/net/order/tcp_test.go b/tests/integration/net/order/tcp_test.go index e33ca7c1e1..8a419360d5 100644 --- a/tests/integration/net/order/tcp_test.go +++ b/tests/integration/net/order/tcp_test.go @@ -17,6 +17,7 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/config" + testutils "github.com/sourcenetwork/defradb/tests/integration" ) // TestP2PWithSingleDocumentUpdatePerNode tests document syncing between two nodes with a single update per node @@ -135,10 +136,12 @@ func TestP2PWithMultipleDocumentUpdatesPerNode(t *testing.T) { // TestP2FullPReplicator tests document syncing between a node and a replicator. 
func TestP2FullPReplicator(t *testing.T) { + colDefMap, err := testutils.ParseSDL(userCollectionGQLSchema) + require.NoError(t, err) doc, err := client.NewDocFromJSON([]byte(`{ "Name": "John", "Age": 21 - }`)) + }`), colDefMap[userCollection].Schema) require.NoError(t, err) test := P2PTestCase{ diff --git a/tests/integration/net/order/utils.go b/tests/integration/net/order/utils.go index 09aa44bb13..e1149ae9c2 100644 --- a/tests/integration/net/order/utils.go +++ b/tests/integration/net/order/utils.go @@ -134,7 +134,7 @@ func seedDocument(ctx context.Context, db client.DB, document string) (client.Do return client.DocID{}, err } - doc, err := client.NewDocFromJSON([]byte(document)) + doc, err := client.NewDocFromJSON([]byte(document), col.Schema()) if err != nil { return client.DocID{}, err } diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index 56c324802f..f0eb805487 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -68,7 +68,7 @@ func TestQueryOneToManyWithCidAndDocID(t *testing.T) { Description: "One-to-many relation query from one side with cid and docID", Request: `query { Book ( - cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + cid: "bafybeiddywe5odj47ljhyslzey3kbmw3yqdzsstqqjh3ge6cliy2unty64" docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -117,7 +117,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { Description: "One-to-many relation query from one side with child update and parent cid and docID", Request: `query { Book ( - cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", + cid: "bafybeiddywe5odj47ljhyslzey3kbmw3yqdzsstqqjh3ge6cliy2unty64", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -172,18 +172,13 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { - Book ( - cid: "bafybeieugqrhaeyhlxo2l2b4jxcqq2ut4m3xtrm3qejz4zc4sxx4stoc5q", - docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" - ) { - name - rating - author { + Book ( + cid: "bafybeie2okvnf3w3767gspsnln5d6n54hvnmu65wjkadxciopwoi6gxqha", + docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" + ) { name } - } - }`, - + }`, Docs: map[int][]string{ //books 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d @@ -230,7 +225,7 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { Book ( - cid: "bafybeifnz3yz3rkd2bc2uv6i7ucfdlqji5wevs5anziwpr76ia45ygtbk4", + cid: "bafybeie2okvnf3w3767gspsnln5d6n54hvnmu65wjkadxciopwoi6gxqha", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name diff --git a/tests/integration/query/one_to_many/with_id_field_test.go b/tests/integration/query/one_to_many/with_id_field_test.go index c51e5f8d4c..0a26cc17ff 100644 --- a/tests/integration/query/one_to_many/with_id_field_test.go +++ b/tests/integration/query/one_to_many/with_id_field_test.go @@ -55,6 +55,7 @@ func TestQueryOneToManyWithIdFieldOnPrimary(t *testing.T) { "name": "A Time for Mercy", "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed" }`, + ExpectedError: "value doesn't contain number; it contains string", }, testUtils.Request{ Request: `query { @@ -67,13 
+68,6 @@ func TestQueryOneToManyWithIdFieldOnPrimary(t *testing.T) { } }`, Results: []map[string]any{ - { - "name": "A Time for Mercy", - "author_id": "bae-2edb7fdd-cad7-5ad4-9c7d-6920245a96ed", - "author": map[string]any{ - "name": "John Grisham", - }, - }, { "name": "Painted House", "author_id": int64(123456), diff --git a/tests/integration/query/one_to_many_to_many/joins_test.go b/tests/integration/query/one_to_many_to_many/joins_test.go index f883f9ae9f..2e040b05a7 100644 --- a/tests/integration/query/one_to_many_to_many/joins_test.go +++ b/tests/integration/query/one_to_many_to_many/joins_test.go @@ -59,7 +59,7 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { // Books 1: { - // "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", Has 1 Publisher + // "bae-080d7580-a791-541e-90bd-49bf69f858e1", Has 1 Publisher `{ "name": "The Rooster Bar", "rating": 4, @@ -103,7 +103,7 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { "name": "Only Publisher of The Rooster Bar", "address": "1 Rooster Ave., Waterloo, Ontario", "yearOpened": 2022, - "book_id": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935" + "book_id": "bae-080d7580-a791-541e-90bd-49bf69f858e1" }`, `{ "name": "Only Publisher of Theif Lord", @@ -210,11 +210,11 @@ func TestOneToManyToManyJoinsAreLinkedProperly(t *testing.T) { "_docID": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", "book": []map[string]any{ { - "_docID": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", + "_docID": "bae-080d7580-a791-541e-90bd-49bf69f858e1", "name": "The Rooster Bar", "publisher": []map[string]any{ { - "_docID": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", + "_docID": "bae-a5836991-96a3-5147-83be-3374a8b62e6c", "name": "Only Publisher of The Rooster Bar", }, }, diff --git a/tests/integration/query/one_to_many_to_one/fixture.go b/tests/integration/query/one_to_many_to_one/fixture.go index aec8165da6..a078c630b2 100644 --- a/tests/integration/query/one_to_many_to_one/fixture.go +++ b/tests/integration/query/one_to_many_to_one/fixture.go @@ -75,7 +75,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { // Books { CollectionID: 1, - // "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", Has 1 Publisher + // "bae-080d7580-a791-541e-90bd-49bf69f858e1", Has 1 Publisher Doc: `{ "name": "The Rooster Bar", "rating": 4, @@ -134,7 +134,7 @@ func createDocsWith6BooksAnd5Publishers() []testUtils.CreateDoc { "name": "Only Publisher of The Rooster Bar", "address": "1 Rooster Ave., Waterloo, Ontario", "yearOpened": 2022, - "book_id": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935" + "book_id": "bae-080d7580-a791-541e-90bd-49bf69f858e1" }`, }, { diff --git a/tests/integration/query/one_to_many_to_one/joins_test.go b/tests/integration/query/one_to_many_to_one/joins_test.go index 57b76a15b9..dbb6dad8da 100644 --- a/tests/integration/query/one_to_many_to_one/joins_test.go +++ b/tests/integration/query/one_to_many_to_one/joins_test.go @@ -52,7 +52,7 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { // Books testUtils.CreateDoc{ CollectionID: 1, - // "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", Has 1 Publisher + // "bae-080d7580-a791-541e-90bd-49bf69f858e1", Has 1 Publisher Doc: `{ "name": "The Rooster Bar", "rating": 4, @@ -111,7 +111,7 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { "name": "Only Publisher of The Rooster Bar", "address": "1 Rooster Ave., Waterloo, Ontario", "yearOpened": 2022, - "book_id": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935" + "book_id": "bae-080d7580-a791-541e-90bd-49bf69f858e1" }`, }, testUtils.CreateDoc{ 
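The docID updates threaded through these fixtures all follow from the typing change: IDs are derived from the marshaled document, so identical JSON now hashes to a different `bae-...` value. Rather than hard-coding the new IDs, a test could recompute them with the `GenerateDocID` call used in `net/client_test.go` above; a sketch, assuming the schema comes from the enclosing fixture:

```go
package example

import "github.com/sourcenetwork/defradb/client"

// expectedDocID rebuilds a document from its fixture JSON and asks it for
// its content-derived ID, so hard-coded "bae-..." strings can be
// regenerated whenever the serialization format changes.
func expectedDocID(schema client.SchemaDescription, json []byte) (string, error) {
	doc, err := client.NewDocFromJSON(json, schema)
	if err != nil {
		return "", err
	}
	id, err := doc.GenerateDocID()
	if err != nil {
		return "", err
	}
	return id.String(), nil
}
```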
@@ -219,10 +219,10 @@ func TestOneToManyToOneJoinsAreLinkedProperly(t *testing.T) { "_docID": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", "book": []map[string]any{ { - "_docID": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", + "_docID": "bae-080d7580-a791-541e-90bd-49bf69f858e1", "name": "The Rooster Bar", "publisher": map[string]any{ - "_docID": "bae-3f0f19eb-b292-5e0b-b885-67e7796375f9", + "_docID": "bae-a5836991-96a3-5147-83be-3374a8b62e6c", "name": "Only Publisher of The Rooster Bar", }, }, diff --git a/tests/integration/query/one_to_many_to_one/simple_test.go b/tests/integration/query/one_to_many_to_one/simple_test.go index 62a9561ae1..03bb0b781f 100644 --- a/tests/integration/query/one_to_many_to_one/simple_test.go +++ b/tests/integration/query/one_to_many_to_one/simple_test.go @@ -52,7 +52,7 @@ func TestQueryOneToOneRelations(t *testing.T) { // Books testUtils.CreateDoc{ CollectionID: 1, - // "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", Has 1 Publisher + // "bae-080d7580-a791-541e-90bd-49bf69f858e1", Has 1 Publisher Doc: `{ "name": "The Rooster Bar", "rating": 4, @@ -84,7 +84,7 @@ func TestQueryOneToOneRelations(t *testing.T) { "name": "Only Publisher of The Rooster Bar", "address": "1 Rooster Ave., Waterloo, Ontario", "yearOpened": 2022, - "book_id": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935" + "book_id": "bae-080d7580-a791-541e-90bd-49bf69f858e1" }`, }, testUtils.CreateDoc{ @@ -110,13 +110,6 @@ func TestQueryOneToOneRelations(t *testing.T) { } }`, Results: []map[string]any{ - { - "name": "The Associate", - "author": map[string]any{ - "name": "John Grisham", - }, - "publisher": nil, - }, { "name": "The Rooster Bar", "author": map[string]any{ @@ -126,6 +119,13 @@ func TestQueryOneToOneRelations(t *testing.T) { "name": "Only Publisher of The Rooster Bar", }, }, + { + "name": "The Associate", + "author": map[string]any{ + "name": "John Grisham", + }, + "publisher": nil, + }, { "name": "Theif Lord", "author": map[string]any{ diff --git a/tests/integration/query/one_to_many_to_one/with_filter_test.go b/tests/integration/query/one_to_many_to_one/with_filter_test.go index e02ae9e12c..65c402dfa2 100644 --- a/tests/integration/query/one_to_many_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_many_to_one/with_filter_test.go @@ -52,7 +52,7 @@ func TestQueryComplexWithDeepFilterOnRenderedChildren(t *testing.T) { // Books testUtils.CreateDoc{ CollectionID: 1, - // "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", Has 1 Publisher + // "bae-080d7580-a791-541e-90bd-49bf69f858e1", Has 1 Publisher Doc: `{ "name": "The Rooster Bar", "rating": 4, @@ -84,7 +84,7 @@ func TestQueryComplexWithDeepFilterOnRenderedChildren(t *testing.T) { "name": "Only Publisher of The Rooster Bar", "address": "1 Rooster Ave., Waterloo, Ontario", "yearOpened": 2022, - "book_id": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935" + "book_id": "bae-080d7580-a791-541e-90bd-49bf69f858e1" }`, }, testUtils.CreateDoc{ @@ -302,7 +302,7 @@ func TestOneToManyToOneWithCompoundOperatorInFilterAndRelation(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 1, - // bae-0718e995-e7b5-55b1-874a-8f7d956be53c + // bae-2c116b72-21f1-5c87-9148-f69f0c0c087e Doc: `{ "name": "The Lord of the Rings", "rating": 5.0, @@ -315,7 +315,7 @@ func TestOneToManyToOneWithCompoundOperatorInFilterAndRelation(t *testing.T) { "name": "Allen & Unwin", "address": "1 Allen Ave., Sydney, Australia", "yearOpened": 1954, - "book_id": "bae-0718e995-e7b5-55b1-874a-8f7d956be53c" + "book_id": "bae-2c116b72-21f1-5c87-9148-f69f0c0c087e" }`, }, testUtils.Request{ diff --git 
a/tests/integration/query/one_to_many_to_one/with_sum_test.go b/tests/integration/query/one_to_many_to_one/with_sum_test.go index 0fadbfb138..b1db62f07a 100644 --- a/tests/integration/query/one_to_many_to_one/with_sum_test.go +++ b/tests/integration/query/one_to_many_to_one/with_sum_test.go @@ -24,7 +24,7 @@ func TestQueryWithSumOnInlineAndSumOnOneToManyField(t *testing.T) { // Authors testUtils.CreateDoc{ CollectionID: 0, - // bae-3c4217d2-f879-50b1-b375-acf42b764e5b, Has written 5 books + // bae-0c100ad0-1511-5f37-984d-66fa8534b06f, Has written 5 books Doc: `{ "name": "John Grisham", "age": 65, @@ -44,7 +44,7 @@ func TestQueryWithSumOnInlineAndSumOnOneToManyField(t *testing.T) { // Books testUtils.CreateDoc{ CollectionID: 1, - // "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935", Has 1 Publisher + // "bae-080d7580-a791-541e-90bd-49bf69f858e1", Has 1 Publisher Doc: `{ "name": "The Rooster Bar", "rating": 4, @@ -57,7 +57,7 @@ func TestQueryWithSumOnInlineAndSumOnOneToManyField(t *testing.T) { Doc: `{ "name": "Theif Lord", "rating": 4.8, - "author_id": "bae-3c4217d2-f879-50b1-b375-acf42b764e5b" + "author_id": "bae-0c100ad0-1511-5f37-984d-66fa8534b06f" }`, }, testUtils.CreateDoc{ @@ -66,7 +66,7 @@ func TestQueryWithSumOnInlineAndSumOnOneToManyField(t *testing.T) { Doc: `{ "name": "The Associate", "rating": 4.2, - "author_id": "bae-3c4217d2-f879-50b1-b375-acf42b764e5b" + "author_id": "bae-0c100ad0-1511-5f37-984d-66fa8534b06f" }`, }, // Publishers @@ -76,7 +76,7 @@ func TestQueryWithSumOnInlineAndSumOnOneToManyField(t *testing.T) { "name": "Only Publisher of The Rooster Bar", "address": "1 Rooster Ave., Waterloo, Ontario", "yearOpened": 2022, - "book_id": "bae-b6c078f2-3427-5b99-bafd-97dcd7c2e935" + "book_id": "bae-080d7580-a791-541e-90bd-49bf69f858e1" }`, }, testUtils.CreateDoc{ diff --git a/tests/integration/query/one_to_one/simple_test.go b/tests/integration/query/one_to_one/simple_test.go index 6f7f95b21e..b5a1da594c 100644 --- a/tests/integration/query/one_to_one/simple_test.go +++ b/tests/integration/query/one_to_one/simple_test.go @@ -126,7 +126,7 @@ func TestQueryOneToOneWithMultipleRecords(t *testing.T) { "name": "Painted House", "rating": 4.9 }`, - // "bae-d3bc0f38-a2e1-5a26-9cc9-5b3fdb41c6db" + // "bae-ad4ad79c-278d-55cd-a9e3-85f3bc9a0947" `{ "name": "Go Guide for Rust developers", "rating": 5.0 @@ -146,7 +146,7 @@ func TestQueryOneToOneWithMultipleRecords(t *testing.T) { "name": "Andrew Lone", "age": 30, "verified": true, - "published_id": "bae-d3bc0f38-a2e1-5a26-9cc9-5b3fdb41c6db" + "published_id": "bae-ad4ad79c-278d-55cd-a9e3-85f3bc9a0947" }`, }, }, diff --git a/tests/integration/query/one_to_one/with_count_filter_test.go b/tests/integration/query/one_to_one/with_count_filter_test.go index c005acac01..a69be17f78 100644 --- a/tests/integration/query/one_to_one/with_count_filter_test.go +++ b/tests/integration/query/one_to_one/with_count_filter_test.go @@ -35,7 +35,7 @@ func TestQueryOneToOneWithCountWithCompoundOrFilterThatIncludesRelation(t *testi }, testUtils.CreateDoc{ CollectionID: 0, - // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + // bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6 Doc: `{ "name": "Some Book", "rating": 4.0 @@ -51,7 +51,7 @@ func TestQueryOneToOneWithCountWithCompoundOrFilterThatIncludesRelation(t *testi }, testUtils.CreateDoc{ CollectionID: 0, - // bae-e8642720-08cb-5f5b-a8d6-7187c444a78d + // TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation Doc: `{ "name": "Yet Another Book", "rating": 3.0 @@ -73,7 +73,7 @@ func 
TestQueryOneToOneWithCountWithCompoundOrFilterThatIncludesRelation(t *testi "name": "Some Writer", "age": 45, "verified": false, - "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + "published_id": "bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6" }`, }, testUtils.CreateDoc{ @@ -91,7 +91,7 @@ func TestQueryOneToOneWithCountWithCompoundOrFilterThatIncludesRelation(t *testi "name": "Yet Another Writer", "age": 30, "verified": false, - "published_id": "bae-e8642720-08cb-5f5b-a8d6-7187c444a78d" + "published_id": "TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation" }`, }, testUtils.Request{ diff --git a/tests/integration/query/one_to_one/with_filter_test.go b/tests/integration/query/one_to_one/with_filter_test.go index 25b42d4268..9d00cdd416 100644 --- a/tests/integration/query/one_to_one/with_filter_test.go +++ b/tests/integration/query/one_to_one/with_filter_test.go @@ -306,7 +306,7 @@ func TestQueryOneToOneWithCompoundAndFilterThatIncludesRelation(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 0, - // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + // bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6 Doc: `{ "name": "Some Book", "rating": 4.0 @@ -336,7 +336,7 @@ func TestQueryOneToOneWithCompoundAndFilterThatIncludesRelation(t *testing.T) { "name": "Some Writer", "age": 45, "verified": false, - "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + "published_id": "bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6" }`, }, testUtils.CreateDoc{ @@ -386,7 +386,7 @@ func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 0, - // bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8 + // bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6 Doc: `{ "name": "Some Book", "rating": 4.0 @@ -402,7 +402,7 @@ func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { }, testUtils.CreateDoc{ CollectionID: 0, - // bae-e8642720-08cb-5f5b-a8d6-7187c444a78d + // TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation Doc: `{ "name": "Yet Another Book", "rating": 3.0 @@ -424,7 +424,7 @@ func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { "name": "Some Writer", "age": 45, "verified": false, - "published_id": "bae-f60d6af6-92f7-5f11-9182-1d7273a5a9e8" + "published_id": "bae-437092f3-7817-555c-bf8a-cc1c5a0a0db6" }`, }, testUtils.CreateDoc{ @@ -442,7 +442,7 @@ func TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation(t *testing.T) { "name": "Yet Another Writer", "age": 30, "verified": false, - "published_id": "bae-e8642720-08cb-5f5b-a8d6-7187c444a78d" + "published_id": "TestQueryOneToOneWithCompoundOrFilterThatIncludesRelation" }`, }, testUtils.Request{ diff --git a/tests/integration/query/one_to_two_many/simple_test.go b/tests/integration/query/one_to_two_many/simple_test.go index 6a8fe674e2..6768c9e9b9 100644 --- a/tests/integration/query/one_to_two_many/simple_test.go +++ b/tests/integration/query/one_to_two_many/simple_test.go @@ -243,7 +243,7 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships(t *testing.T) { "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "reviewedBy_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", - "price_id": "bae-d64a5165-1e77-5a67-95f2-6b1ff14b2179" + "price_id": "bae-b4b58dab-7bc3-5a3a-a26b-63d9d555116d" }`, `{ "name": "Theif Lord", @@ -274,7 +274,7 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships(t *testing.T) { "currency": "GBP", "value": 12.99 }`, - // bae-d64a5165-1e77-5a67-95f2-6b1ff14b2179 + // bae-b4b58dab-7bc3-5a3a-a26b-63d9d555116d `{ 
"currency": "SEK", "value": 129 @@ -362,7 +362,7 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships(t *testing.T) { "rating": 4.5, "author_id": "bae-41598f0c-19bc-5da6-813b-e80f14a10df3", "reviewedBy_id": "bae-b769708d-f552-5c3d-a402-ccfd7ac7fb04", - "price_id": "bae-d64a5165-1e77-5a67-95f2-6b1ff14b2179" + "price_id": "bae-b4b58dab-7bc3-5a3a-a26b-63d9d555116d" }`, `{ "name": "Theif Lord", @@ -393,7 +393,7 @@ func TestQueryOneToTwoManyWithNamedAndUnnamedRelationships(t *testing.T) { "currency": "GBP", "value": 12.99 }`, - // bae-d64a5165-1e77-5a67-95f2-6b1ff14b2179 + // bae-b4b58dab-7bc3-5a3a-a26b-63d9d555116d `{ "currency": "SEK", "value": 129 diff --git a/tests/integration/query/simple/with_average_filter_test.go b/tests/integration/query/simple/with_average_filter_test.go index 50dad98819..8711c56e6b 100644 --- a/tests/integration/query/simple/with_average_filter_test.go +++ b/tests/integration/query/simple/with_average_filter_test.go @@ -52,24 +52,24 @@ func TestQuerySimpleWithAverageWithDateTimeFilter(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query, average with datetime filter", Request: `query { - _avg(Users: {field: Age, filter: {CreatedAt: {_gt: "2017-07-23T03:46:56.647Z"}}}) + _avg(Users: {field: Age, filter: {CreatedAt: {_gt: "2017-07-23T03:46:56-05:00"}}}) }`, Docs: map[int][]string{ 0: { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 30, - "CreatedAt": "2018-07-23T03:46:56.647Z" + "CreatedAt": "2018-07-23T03:46:56-05:00" }`, `{ "Name": "Alice", "Age": 32, - "CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, }, }, diff --git a/tests/integration/query/simple/with_count_filter_test.go b/tests/integration/query/simple/with_count_filter_test.go index 52352b4898..4724815a9c 100644 --- a/tests/integration/query/simple/with_count_filter_test.go +++ b/tests/integration/query/simple/with_count_filter_test.go @@ -52,24 +52,24 @@ func TestQuerySimpleWithCountWithDateTimeFilter(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query, count with datetime filter", Request: `query { - _count(Users: {filter: {CreatedAt: {_gt: "2017-08-23T03:46:56.647Z"}}}) + _count(Users: {filter: {CreatedAt: {_gt: "2017-08-23T03:46:56-05:00"}}}) }`, Docs: map[int][]string{ 0: { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 30, - "CreatedAt": "2017-09-23T03:46:56.647Z" + "CreatedAt": "2017-09-23T03:46:56-05:00" }`, `{ "Name": "Alice", "Age": 32, - "CreatedAt": "2017-10-23T03:46:56.647Z" + "CreatedAt": "2017-10-23T03:46:56-05:00" }`, }, }, diff --git a/tests/integration/query/simple/with_filter/with_eq_datetime_test.go b/tests/integration/query/simple/with_filter/with_eq_datetime_test.go index 10214fad92..bf4518749a 100644 --- a/tests/integration/query/simple/with_filter/with_eq_datetime_test.go +++ b/tests/integration/query/simple/with_filter/with_eq_datetime_test.go @@ -20,7 +20,7 @@ func TestQuerySimpleWithDateTimeEqualsFilterBlock(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic filter(age)", Request: `query { - Users(filter: {CreatedAt: {_eq: "2017-07-23T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_eq: "2017-07-23T03:46:56-05:00"}}) { Name Age CreatedAt @@ -31,12 +31,12 @@ func TestQuerySimpleWithDateTimeEqualsFilterBlock(t *testing.T) { `{ "Name": "John", "Age": 21, - 
"CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2016-07-23T03:46:56.647Z" + "CreatedAt": "2016-07-23T03:46:56-05:00" }`, }, }, @@ -44,7 +44,7 @@ func TestQuerySimpleWithDateTimeEqualsFilterBlock(t *testing.T) { { "Name": "John", "Age": int64(21), - "CreatedAt": "2017-07-23T03:46:56.647Z", + "CreatedAt": testUtils.MustParseTime("2017-07-23T03:46:56-05:00"), }, }, } @@ -67,12 +67,12 @@ func TestQuerySimpleWithDateTimeEqualsNilFilterBlock(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2016-07-23T03:46:56.647Z" + "CreatedAt": "2016-07-23T03:46:56-05:00" }`, `{ "Name": "Fred", diff --git a/tests/integration/query/simple/with_filter/with_ge_datetime_test.go b/tests/integration/query/simple/with_filter/with_ge_datetime_test.go index fc38b3f002..69eddcd9c4 100644 --- a/tests/integration/query/simple/with_filter/with_ge_datetime_test.go +++ b/tests/integration/query/simple/with_filter/with_ge_datetime_test.go @@ -20,7 +20,7 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithEqualValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic ge int filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_ge: "2017-07-23T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_ge: "2017-07-23T03:46:56-05:00"}}) { Name } }`, @@ -29,12 +29,12 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithEqualValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, @@ -52,7 +52,7 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithGreaterValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic ge int filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_ge: "2017-07-22T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_ge: "2017-07-22T03:46:56-05:00"}}) { Name } }`, @@ -61,12 +61,12 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithGreaterValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, @@ -84,7 +84,7 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithLesserValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic ge int filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_ge: "2017-07-25T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_ge: "2017-07-25T03:46:56-05:00"}}) { Name } }`, @@ -93,12 +93,12 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithLesserValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, @@ -120,7 +120,7 @@ func TestQuerySimpleWithDateTimeGEFilterBlockWithNilValue(t *testing.T) { 0: { `{ "Name": "John", - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, `{ "Name": "Bob" diff --git a/tests/integration/query/simple/with_filter/with_gt_datetime_test.go 
b/tests/integration/query/simple/with_filter/with_gt_datetime_test.go index a44f5cae28..468dcf07e5 100644 --- a/tests/integration/query/simple/with_filter/with_gt_datetime_test.go +++ b/tests/integration/query/simple/with_filter/with_gt_datetime_test.go @@ -20,7 +20,7 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithEqualValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic gt datetime filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_gt: "2017-07-20T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_gt: "2017-07-20T03:46:56-05:00"}}) { Name } }`, @@ -29,12 +29,12 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithEqualValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, @@ -52,7 +52,7 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithGreaterValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic gt DateTime filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_gt: "2017-07-22T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_gt: "2017-07-22T03:46:56-05:00"}}) { Name } }`, @@ -61,12 +61,12 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithGreaterValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, @@ -84,7 +84,7 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithLesserValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic gt datetime filter with lesser value", Request: `query { - Users(filter: {CreatedAt: {_gt: "2017-07-25T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_gt: "2017-07-25T03:46:56-05:00"}}) { Name } }`, @@ -93,12 +93,12 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithLesserValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, @@ -120,7 +120,7 @@ func TestQuerySimpleWithDateTimeGTFilterBlockWithNilValue(t *testing.T) { 0: { `{ "Name": "John", - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, `{ "Name": "Bob" diff --git a/tests/integration/query/simple/with_filter/with_le_datetime_test.go b/tests/integration/query/simple/with_filter/with_le_datetime_test.go index 97c56361ed..051a97de43 100644 --- a/tests/integration/query/simple/with_filter/with_le_datetime_test.go +++ b/tests/integration/query/simple/with_filter/with_le_datetime_test.go @@ -20,7 +20,7 @@ func TestQuerySimpleWithDateTimeLEFilterBlockWithEqualValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic le DateTime filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_le: "2017-07-23T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_le: "2017-07-23T03:46:56-05:00"}}) { Name } }`, @@ -29,12 +29,12 @@ func TestQuerySimpleWithDateTimeLEFilterBlockWithEqualValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - 
"CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, }, }, @@ -52,7 +52,7 @@ func TestQuerySimpleWithDateTimeLEFilterBlockWithGreaterValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic le DateTime filter with greater value", Request: `query { - Users(filter: {CreatedAt: {_le: "2018-07-23T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_le: "2018-07-23T03:46:56-05:00"}}) { Name } }`, @@ -61,12 +61,12 @@ func TestQuerySimpleWithDateTimeLEFilterBlockWithGreaterValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, }, }, @@ -93,7 +93,7 @@ func TestQuerySimpleWithDateTimeLEFilterBlockWithNullValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", diff --git a/tests/integration/query/simple/with_filter/with_lt_datetime_test.go b/tests/integration/query/simple/with_filter/with_lt_datetime_test.go index a7787acc9d..0d17607891 100644 --- a/tests/integration/query/simple/with_filter/with_lt_datetime_test.go +++ b/tests/integration/query/simple/with_filter/with_lt_datetime_test.go @@ -20,7 +20,7 @@ func TestQuerySimpleWithDateTimeLTFilterBlockWithGreaterValue(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with basic lt DateTime filter with equal value", Request: `query { - Users(filter: {CreatedAt: {_lt: "2017-07-25T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_lt: "2017-07-25T03:46:56-05:00"}}) { Name } }`, @@ -29,12 +29,12 @@ func TestQuerySimpleWithDateTimeLTFilterBlockWithGreaterValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, }, }, @@ -61,7 +61,7 @@ func TestQuerySimpleWithDateTimeLTFilterBlockWithNullValue(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", diff --git a/tests/integration/query/simple/with_filter/with_ne_datetime_test.go b/tests/integration/query/simple/with_filter/with_ne_datetime_test.go index 0fbc007d1f..0179684732 100644 --- a/tests/integration/query/simple/with_filter/with_ne_datetime_test.go +++ b/tests/integration/query/simple/with_filter/with_ne_datetime_test.go @@ -20,7 +20,7 @@ func TestQuerySimpleWithDateTimeNotEqualsFilterBlock(t *testing.T) { test := testUtils.RequestTestCase{ Description: "Simple query with ne DateTime filter", Request: `query { - Users(filter: {CreatedAt: {_ne: "2017-07-23T03:46:56.647Z"}}) { + Users(filter: {CreatedAt: {_ne: "2017-07-23T03:46:56-05:00"}}) { Name } }`, @@ -29,12 +29,12 @@ func TestQuerySimpleWithDateTimeNotEqualsFilterBlock(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2011-07-23T03:46:56.647Z" + "CreatedAt": "2011-07-23T03:46:56-05:00" }`, }, }, @@ -61,12 +61,12 @@ func TestQuerySimpleWithDateTimeNotEqualsNilFilterBlock(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, 
- "CreatedAt": "2011-07-23T03:46:56.647Z" + "CreatedAt": "2011-07-23T03:46:56-05:00" }`, `{ "Name": "Fred", diff --git a/tests/integration/query/simple/with_group_average_filter_test.go b/tests/integration/query/simple/with_group_average_filter_test.go index 23d79b0bf8..fe33e9f4ef 100644 --- a/tests/integration/query/simple/with_group_average_filter_test.go +++ b/tests/integration/query/simple/with_group_average_filter_test.go @@ -118,7 +118,7 @@ func TestQuerySimpleWithGroupByStringWithRenderedGroupAndChildAverageWithDateTim Request: `query { Users(groupBy: [Name]) { Name - _avg(_group: {field: Age, filter: {CreatedAt: {_gt: "2017-07-23T03:46:56.647Z"}}}) + _avg(_group: {field: Age, filter: {CreatedAt: {_gt: "2017-07-23T03:46:56-05:00"}}}) _group { Age } @@ -129,17 +129,17 @@ func TestQuerySimpleWithGroupByStringWithRenderedGroupAndChildAverageWithDateTim `{ "Name": "John", "Age": 34, - "CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, `{ "Name": "John", "Age": 32, - "CreatedAt": "2018-07-23T03:46:56.647Z" + "CreatedAt": "2018-07-23T03:46:56-05:00" }`, `{ "Name": "Alice", "Age": 19, - "CreatedAt": "2011-07-23T03:46:56.647Z" + "CreatedAt": "2011-07-23T03:46:56-05:00" }`, }, }, @@ -226,8 +226,8 @@ func TestQuerySimpleWithGroupByStringWithRenderedGroupWithFilterAndChildAverageW Request: `query { Users(groupBy: [Name]) { Name - _avg(_group: {field: Age, filter: {CreatedAt: {_gt: "2016-07-23T03:46:56.647Z"}}}) - _group(filter: {CreatedAt: {_gt: "2016-07-23T03:46:56.647Z"}}) { + _avg(_group: {field: Age, filter: {CreatedAt: {_gt: "2016-07-23T03:46:56-05:00"}}}) + _group(filter: {CreatedAt: {_gt: "2016-07-23T03:46:56-05:00"}}) { Age } } @@ -237,17 +237,17 @@ func TestQuerySimpleWithGroupByStringWithRenderedGroupWithFilterAndChildAverageW `{ "Name": "John", "Age": 34, - "CreatedAt": "2017-07-23T03:46:56.647Z" + "CreatedAt": "2017-07-23T03:46:56-05:00" }`, `{ "Name": "John", "Age": 32, - "CreatedAt": "2011-07-23T03:46:56.647Z" + "CreatedAt": "2011-07-23T03:46:56-05:00" }`, `{ "Name": "Alice", "Age": 19, - "CreatedAt": "2010-07-23T03:46:56.647Z" + "CreatedAt": "2010-07-23T03:46:56-05:00" }`, }, }, diff --git a/tests/integration/query/simple/with_group_test.go b/tests/integration/query/simple/with_group_test.go index 3fae88b1ef..e740787d3c 100644 --- a/tests/integration/query/simple/with_group_test.go +++ b/tests/integration/query/simple/with_group_test.go @@ -111,31 +111,31 @@ func TestQuerySimpleWithGroupByDateTime(t *testing.T) { 0: { `{ "Name": "John", - "CreatedAt": "2011-07-23T03:46:56.647Z" + "CreatedAt": "2011-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", - "CreatedAt": "2011-07-23T03:46:56.647Z" + "CreatedAt": "2011-07-23T03:46:56-05:00" }`, `{ "Name": "Carlo", - "CreatedAt": "2012-07-23T03:46:56.647Z" + "CreatedAt": "2012-07-23T03:46:56-05:00" }`, `{ "Name": "Alice", - "CreatedAt": "2013-07-23T03:46:56.647Z" + "CreatedAt": "2013-07-23T03:46:56-05:00" }`, }, }, Results: []map[string]any{ { - "CreatedAt": "2013-07-23T03:46:56.647Z", + "CreatedAt": testUtils.MustParseTime("2013-07-23T03:46:56-05:00"), }, { - "CreatedAt": "2011-07-23T03:46:56.647Z", + "CreatedAt": testUtils.MustParseTime("2012-07-23T03:46:56-05:00"), }, { - "CreatedAt": "2012-07-23T03:46:56.647Z", + "CreatedAt": testUtils.MustParseTime("2011-07-23T03:46:56-05:00"), }, }, } diff --git a/tests/integration/query/simple/with_order_test.go b/tests/integration/query/simple/with_order_test.go index 0936feccb1..f66241d944 100644 --- a/tests/integration/query/simple/with_order_test.go +++ 
b/tests/integration/query/simple/with_order_test.go @@ -125,22 +125,22 @@ func TestQuerySimpleWithDateTimeOrderAscending(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2021-07-23T03:46:56.647Z" + "CreatedAt": "2021-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2032-07-23T03:46:56.647Z" + "CreatedAt": "2032-07-23T03:46:56-05:00" }`, `{ "Name": "Carlo", "Age": 55, - "CreatedAt": "2055-07-23T03:46:56.647Z" + "CreatedAt": "2055-07-23T03:46:56-05:00" }`, `{ "Name": "Alice", "Age": 19, - "CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, }, }, @@ -233,22 +233,22 @@ func TestQuerySimpleWithDateTimeOrderDescending(t *testing.T) { `{ "Name": "John", "Age": 21, - "CreatedAt": "2021-07-23T03:46:56.647Z" + "CreatedAt": "2021-07-23T03:46:56-05:00" }`, `{ "Name": "Bob", "Age": 32, - "CreatedAt": "2032-07-23T03:46:56.647Z" + "CreatedAt": "2032-07-23T03:46:56-05:00" }`, `{ "Name": "Carlo", "Age": 55, - "CreatedAt": "2055-07-23T03:46:56.647Z" + "CreatedAt": "2055-07-23T03:46:56-05:00" }`, `{ "Name": "Alice", "Age": 19, - "CreatedAt": "2019-07-23T03:46:56.647Z" + "CreatedAt": "2019-07-23T03:46:56-05:00" }`, }, }, diff --git a/tests/integration/results.go b/tests/integration/results.go index 35a2249c0b..df21acef30 100644 --- a/tests/integration/results.go +++ b/tests/integration/results.go @@ -13,6 +13,7 @@ package tests import ( "encoding/json" "testing" + "time" "github.com/sourcenetwork/immutable" "github.com/stretchr/testify/assert" @@ -139,6 +140,8 @@ func areResultsEqual(expected any, actual any) bool { return areResultArraysEqual(expectedVal, actual) case []immutable.Option[string]: return areResultArraysEqual(expectedVal, actual) + case time.Time: + return areResultsEqual(expectedVal.Format(time.RFC3339), actual) default: return assert.ObjectsAreEqualValues(expected, actual) } diff --git a/tests/integration/schema/updates/add/field/kind/datetime_test.go b/tests/integration/schema/updates/add/field/kind/datetime_test.go index 6ebcc3af6f..a9ee10a2de 100644 --- a/tests/integration/schema/updates/add/field/kind/datetime_test.go +++ b/tests/integration/schema/updates/add/field/kind/datetime_test.go @@ -62,7 +62,7 @@ func TestSchemaUpdatesAddFieldKindDateTimeWithCreate(t *testing.T) { testUtils.SchemaPatch{ Patch: ` [ - { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 4} } + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 10} } ] `, }, @@ -70,7 +70,7 @@ func TestSchemaUpdatesAddFieldKindDateTimeWithCreate(t *testing.T) { CollectionID: 0, Doc: `{ "name": "John", - "foo": "2017-07-23T03:46:56.647Z" + "foo": "2017-07-23T03:46:56-05:00" }`, }, testUtils.Request{ @@ -83,7 +83,7 @@ func TestSchemaUpdatesAddFieldKindDateTimeWithCreate(t *testing.T) { Results: []map[string]any{ { "name": "John", - "foo": "2017-07-23T03:46:56.647Z", + "foo": testUtils.MustParseTime("2017-07-23T03:46:56-05:00"), }, }, }, @@ -114,7 +114,7 @@ func TestSchemaUpdatesAddFieldKindDateTimeSubstitutionWithCreate(t *testing.T) { CollectionID: 0, Doc: `{ "name": "John", - "foo": "2017-07-23T03:46:56.647Z" + "foo": "2017-07-23T03:46:56-05:00" }`, }, testUtils.Request{ @@ -127,7 +127,7 @@ func TestSchemaUpdatesAddFieldKindDateTimeSubstitutionWithCreate(t *testing.T) { Results: []map[string]any{ { "name": "John", - "foo": "2017-07-23T03:46:56.647Z", + "foo": testUtils.MustParseTime("2017-07-23T03:46:56-05:00"), }, }, }, diff --git a/tests/integration/subscription/subscription_test.go 
b/tests/integration/subscription/subscription_test.go index 49f8bf1f55..947330fbd0 100644 --- a/tests/integration/subscription/subscription_test.go +++ b/tests/integration/subscription/subscription_test.go @@ -288,7 +288,7 @@ func TestSubscriptionWithUpdateAllMutations(t *testing.T) { "points": float64(55), }, { - "_docID": "bae-cf723876-5c6a-5dcf-a877-ab288eb30d57", + "_docID": "bae-76b0f3f5-964c-57c3-b44b-4a91bea70d40", "age": int64(31), "name": "Addo", "points": float64(55), diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index ccfeba4d7a..3e38dba6db 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -32,6 +32,7 @@ import ( "github.com/sourcenetwork/defradb/errors" "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/net" + "github.com/sourcenetwork/defradb/request/graphql" changeDetector "github.com/sourcenetwork/defradb/tests/change_detector" "github.com/sourcenetwork/defradb/tests/clients" "github.com/sourcenetwork/defradb/tests/gen" @@ -817,19 +818,14 @@ func refreshDocuments( for i := 0; i < startActionIndex; i++ { switch action := s.testCase.Actions[i].(type) { case CreateDoc: - // We need to add the existing documents in the order in which the test case lists them - // otherwise they cannot be referenced correctly by other actions. - doc, err := client.NewDocFromJSON([]byte(action.Doc)) - if err != nil { - // If an err has been returned, ignore it - it may be expected and if not - // the test will fail later anyway - continue - } - // Just use the collection from the first relevant node, as all will be the same for this // purpose. collection := getNodeCollections(action.NodeID, s.collections)[0][action.CollectionID] - if err := doc.RemapAliasFieldsAndDocID(collection.Schema().Fields); err != nil { + + // We need to add the existing documents in the order in which the test case lists them + // otherwise they cannot be referenced correctly by other actions. 
+ doc, err := client.NewDocFromJSON([]byte(action.Doc), collection.Schema()) + if err != nil { // If an err has been returned, ignore it - it may be expected and if not // the test will fail later anyway continue @@ -1118,7 +1114,7 @@ func createDocViaColSave( collections []client.Collection, ) (*client.Document, error) { var err error - doc, err := client.NewDocFromJSON([]byte(action.Doc)) + doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Schema()) if err != nil { return nil, err } @@ -1133,7 +1129,7 @@ func createDocViaColCreate( collections []client.Collection, ) (*client.Document, error) { var err error - doc, err := client.NewDocFromJSON([]byte(action.Doc)) + doc, err := client.NewDocFromJSON([]byte(action.Doc), collections[action.CollectionID].Schema()) if err != nil { return nil, err } @@ -1247,13 +1243,20 @@ func updateDocViaColSave( node client.P2P, collections []client.Collection, ) error { - doc := s.documents[action.CollectionID][action.DocID] + cachedDoc := s.documents[action.CollectionID][action.DocID] - err := doc.SetWithJSON([]byte(action.Doc)) + doc, err := collections[action.CollectionID].Get(s.ctx, cachedDoc.ID(), true) if err != nil { return err } + err = doc.SetWithJSON([]byte(action.Doc)) + if err != nil { + return err + } + + s.documents[action.CollectionID][action.DocID] = doc + return collections[action.CollectionID].Save(s.ctx, doc) } @@ -1263,13 +1266,20 @@ func updateDocViaColUpdate( node client.P2P, collections []client.Collection, ) error { - doc := s.documents[action.CollectionID][action.DocID] + cachedDoc := s.documents[action.CollectionID][action.DocID] - err := doc.SetWithJSON([]byte(action.Doc)) + doc, err := collections[action.CollectionID].Get(s.ctx, cachedDoc.ID(), true) if err != nil { return err } + err = doc.SetWithJSON([]byte(action.Doc)) + if err != nil { + return err + } + + s.documents[action.CollectionID][action.DocID] = doc + return collections[action.CollectionID].Update(s.ctx, doc) } @@ -1860,3 +1870,27 @@ func skipIfMutationTypeUnsupported(t *testing.T, supportedMutationTypes immutabl } } } + +func ParseSDL(gqlSDL string) (map[string]client.CollectionDefinition, error) { + parser, err := graphql.NewParser() + if err != nil { + return nil, err + } + cols, err := parser.ParseSDL(context.Background(), gqlSDL) + if err != nil { + return nil, err + } + result := make(map[string]client.CollectionDefinition) + for _, col := range cols { + result[col.Description.Name] = col + } + return result, nil +} + +func MustParseTime(timeString string) time.Time { + t, err := time.Parse(time.RFC3339, timeString) + if err != nil { + panic(err) + } + return t +} diff --git a/tests/predefined/gen_predefined.go b/tests/predefined/gen_predefined.go index 9dc6da0dd6..76e143c896 100644 --- a/tests/predefined/gen_predefined.go +++ b/tests/predefined/gen_predefined.go @@ -141,7 +141,7 @@ func (this *docGenerator) generatePrimary( if err != nil { return nil, nil, NewErrFailedToGenerateDoc(err) } - primDoc, err := client.NewDocFromMap(primDocMap) + primDoc, err := client.NewDocFromMap(primDocMap, primType.Schema) if err != nil { return nil, nil, NewErrFailedToGenerateDoc(err) } @@ -174,7 +174,7 @@ func (this *docGenerator) generateRelatedDocs(docMap map[string]any, typeName st if err != nil { return nil, err } - doc, err := client.NewDocFromMap(requested) + doc, err := client.NewDocFromMap(requested, typeDef.Schema) if err != nil { return nil, NewErrFailedToGenerateDoc(err) } diff --git a/tests/predefined/gen_predefined_test.go 
b/tests/predefined/gen_predefined_test.go index b63617690d..ae68cf9804 100644 --- a/tests/predefined/gen_predefined_test.go +++ b/tests/predefined/gen_predefined_test.go @@ -14,8 +14,8 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" ) @@ -36,7 +36,10 @@ func TestGeneratePredefinedFromSchema_Simple(t *testing.T) { docs, err := CreateFromSDL(schema, docsList) assert.NoError(t, err) - errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs), docs) + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + errorMsg := assertDocs(mustAddDocIDsToDocs(docsList.Docs, colDefMap["User"].Schema), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -57,10 +60,13 @@ func TestGeneratePredefinedFromSchema_StripExcessiveFields(t *testing.T) { }) assert.NoError(t, err) + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - }), docs) + }, colDefMap["User"].Schema), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -96,12 +102,26 @@ func TestGeneratePredefinedFromSchema_OneToOne(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + userDocs := mustAddDocIDsToDocs([]map[string]any{ {"name": "John"}, {"name": "Fred"}, - {"model": "iPhone", "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"})}, - {"model": "MacBook", "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"})}, - }), docs) + }, colDefMap["User"].Schema) + + deviceDocs := mustAddDocIDsToDocs([]map[string]any{ + { + "model": "iPhone", + "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"].Schema), + }, + { + "model": "MacBook", + "owner_id": mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, colDefMap["User"].Schema), + }, + }, colDefMap["Device"].Schema) + + errorMsg := assertDocs(append(userDocs, deviceDocs...), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -137,12 +157,25 @@ func TestGeneratePredefinedFromSchema_OneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ - {"name": "John", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"})}, - {"name": "Fred", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"})}, + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + userDocs := mustAddDocIDsToDocs([]map[string]any{ + { + "name": "John", + "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema), + }, + { + "name": "Fred", + "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook"}, colDefMap["Device"].Schema), + }, + }, colDefMap["User"].Schema) + deviceDocs := mustAddDocIDsToDocs([]map[string]any{ {"model": "iPhone"}, {"model": "MacBook"}, - }), docs) + }, colDefMap["Device"].Schema) + + errorMsg := assertDocs(append(userDocs, deviceDocs...), docs) if errorMsg != "" { t.Error(errorMsg) } @@ -180,9 +213,18 @@ func TestGeneratePredefinedFromSchema_OneToOneToOnePrimary(t *testing.T) { }) assert.NoError(t, err) - specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}) - deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone", "specs_id": specsDoc[request.DocIDFieldName]}) - userDoc := mustAddDocIDToDoc(map[string]any{"name": "John", 
"device_id": deviceDoc[request.DocIDFieldName]}) + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"].Schema) + deviceDoc := mustAddDocIDToDoc(map[string]any{ + "model": "iPhone", + "specs_id": specsDoc[request.DocIDFieldName], + }, colDefMap["Device"].Schema) + userDoc := mustAddDocIDToDoc(map[string]any{ + "name": "John", + "device_id": deviceDoc[request.DocIDFieldName], + }, colDefMap["User"].Schema) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -222,13 +264,16 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneMiddle(t *testing.T) { }) assert.NoError(t, err) - specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}) - userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}) + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS"}, colDefMap["Specs"].Schema) + userDoc := mustAddDocIDToDoc(map[string]any{"name": "John"}, colDefMap["User"].Schema) deviceDoc := mustAddDocIDToDoc(map[string]any{ "model": "iPhone", "specs_id": specsDoc[request.DocIDFieldName], "owner_id": userDoc[request.DocIDFieldName], - }) + }, colDefMap["Device"].Schema) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -268,9 +313,18 @@ func TestGeneratePredefinedFromSchema_OneToTwoPrimary(t *testing.T) { }) assert.NoError(t, err) - deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}) - specsDoc := mustAddDocIDToDoc(map[string]any{"OS": "iOS", "device_id": deviceDoc[request.DocIDFieldName]}) - userDoc := mustAddDocIDToDoc(map[string]any{"name": "John", "device_id": deviceDoc[request.DocIDFieldName]}) + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema) + specsDoc := mustAddDocIDToDoc(map[string]any{ + "OS": "iOS", + "device_id": deviceDoc[request.DocIDFieldName], + }, colDefMap["Specs"].Schema) + userDoc := mustAddDocIDToDoc(map[string]any{ + "name": "John", + "device_id": deviceDoc[request.DocIDFieldName], + }, colDefMap["User"].Schema) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, specsDoc}, docs) if errorMsg != "" { @@ -310,13 +364,16 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { }) assert.NoError(t, err) - deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}) - addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}) + colDefMap, err := parseSDL(schema) + require.NoError(t, err) + + deviceDoc := mustAddDocIDToDoc(map[string]any{"model": "iPhone"}, colDefMap["Device"].Schema) + addressDoc := mustAddDocIDToDoc(map[string]any{"street": "Backer"}, colDefMap["Address"].Schema) userDoc := mustAddDocIDToDoc(map[string]any{ "name": "John", "device_id": deviceDoc[request.DocIDFieldName], "address_id": addressDoc[request.DocIDFieldName], - }) + }, colDefMap["User"].Schema) errorMsg := assertDocs([]map[string]any{userDoc, deviceDoc, addressDoc}, docs) if errorMsg != "" { @@ -324,185 +381,203 @@ func TestGeneratePredefinedFromSchema_TwoPrimaryToOneRoot(t *testing.T) { } } -func TestGeneratePredefinedFromSchema_OneToMany(t *testing.T) { - schema := ` - type User { - name: String - devices: [Device] - } - type Device { - model: String - owner: User - }` - - docs, err := CreateFromSDL(schema, DocsList{ - ColName: "User", - Docs: []map[string]any{ - { - "name": "John", - 
"devices": []map[string]any{ - {"model": "iPhone"}, - {"model": "PlayStation"}, - }, - }, - { - "name": "Fred", - "devices": []map[string]any{ - {"model": "Surface"}, - {"model": "Pixel"}, - }, - }, - }, - }) - assert.NoError(t, err) - - johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) - fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}) - errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ - {"name": "John"}, - {"name": "Fred"}, - {"model": "iPhone", "owner_id": johnDocID}, - {"model": "PlayStation", "owner_id": johnDocID}, - {"model": "Surface", "owner_id": fredDocID}, - {"model": "Pixel", "owner_id": fredDocID}, - }), docs) - if errorMsg != "" { - t.Error(errorMsg) - } -} - -func TestGeneratePredefinedFromSchema_OneToManyToOne(t *testing.T) { - schema := ` - type User { - name: String - devices: [Device] - } - type Device { - model: String - owner: User - specs: Specs - } - type Specs { - CPU: String - device: Device @primary - }` - - docs, err := CreateFromSDL(schema, DocsList{ - ColName: "User", - Docs: []map[string]any{ - { - "name": "John", - "devices": []map[string]any{ - { - "model": "iPhone", - "specs": map[string]any{ - "CPU": "A13", - }, - }, - { - "model": "MacBook", - "specs": map[string]any{ - "CPU": "M2", - }, - }, - }, - }, - }, - }) - assert.NoError(t, err) - - johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) - errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ - {"name": "John"}, - {"model": "iPhone", "owner_id": johnDocID}, - {"model": "MacBook", "owner_id": johnDocID}, - {"CPU": "A13", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "iPhone", "owner_id": johnDocID})}, - {"CPU": "M2", "device_id": mustGetDocIDFromDocMap(map[string]any{"model": "MacBook", "owner_id": johnDocID})}, - }), docs) - if errorMsg != "" { - t.Error(errorMsg) - } -} - -func TestGeneratePredefined_OneToMany(t *testing.T) { - defs := []client.CollectionDefinition{ - { - Description: client.CollectionDescription{ - Name: "User", - ID: 0, - }, - Schema: client.SchemaDescription{ - Name: "User", - Fields: []client.FieldDescription{ - { - Name: "name", - Kind: client.FieldKind_STRING, - }, - { - Name: "devices", - Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, - Schema: "Device", - RelationType: client.Relation_Type_MANY | client.Relation_Type_ONEMANY, - }, - }, - }, - }, - { - Description: client.CollectionDescription{ - Name: "Device", - ID: 1, - }, - Schema: client.SchemaDescription{ - Name: "Device", - Fields: []client.FieldDescription{ - { - Name: "model", - Kind: client.FieldKind_STRING, - }, - { - Name: "owner", - Kind: client.FieldKind_FOREIGN_OBJECT, - Schema: "User", - RelationType: client.Relation_Type_ONE | - client.Relation_Type_ONEMANY | - client.Relation_Type_Primary, - }, - }, - }, - }, - } - docs, err := Create(defs, DocsList{ - ColName: "User", - Docs: []map[string]any{ - { - "name": "John", - "devices": []map[string]any{ - {"model": "iPhone"}, - {"model": "PlayStation"}, - }, - }, - { - "name": "Fred", - "devices": []map[string]any{ - {"model": "Surface"}, - {"model": "Pixel"}, - }, - }, - }, - }) - assert.NoError(t, err) - - johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}) - fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}) - errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ - {"name": "John"}, - {"name": "Fred"}, - {"model": "iPhone", "owner_id": johnDocID}, - {"model": "PlayStation", "owner_id": johnDocID}, - {"model": "Surface", "owner_id": 
fredDocID}, - {"model": "Pixel", "owner_id": fredDocID}, - }), docs) - if errorMsg != "" { - t.Error(errorMsg) - } -} +// func TestGeneratePredefinedFromSchema_OneToMany(t *testing.T) { +// schema := ` +// type User { +// name: String +// devices: [Device] +// } +// type Device { +// model: String +// owner: User +// }` + +// docs, err := CreateFromSDL(schema, DocsList{ +// ColName: "User", +// Docs: []map[string]any{ +// { +// "name": "John", +// "devices": []map[string]any{ +// {"model": "iPhone"}, +// {"model": "PlayStation"}, +// }, +// }, +// { +// "name": "Fred", +// "devices": []map[string]any{ +// {"model": "Surface"}, +// {"model": "Pixel"}, +// }, +// }, +// }, +// }) +// assert.NoError(t, err) + +// colDefMap, err := parseSDL(schema) +// require.NoError(t, err) + +// johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"].Schema) +// fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, colDefMap["User"].Schema) +// errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ +// {"name": "John"}, +// {"name": "Fred"}, +// {"model": "iPhone", "owner_id": johnDocID}, +// {"model": "PlayStation", "owner_id": johnDocID}, +// {"model": "Surface", "owner_id": fredDocID}, +// {"model": "Pixel", "owner_id": fredDocID}, +// }, col), docs) +// if errorMsg != "" { +// t.Error(errorMsg) +// } +// } + +// func TestGeneratePredefinedFromSchema_OneToManyToOne(t *testing.T) { +// schema := ` +// type User { +// name: String +// devices: [Device] +// } +// type Device { +// model: String +// owner: User +// specs: Specs +// } +// type Specs { +// CPU: String +// device: Device @primary +// }` + +// docs, err := CreateFromSDL(schema, DocsList{ +// ColName: "User", +// Docs: []map[string]any{ +// { +// "name": "John", +// "devices": []map[string]any{ +// { +// "model": "iPhone", +// "specs": map[string]any{ +// "CPU": "A13", +// }, +// }, +// { +// "model": "MacBook", +// "specs": map[string]any{ +// "CPU": "M2", +// }, +// }, +// }, +// }, +// }, +// }) +// assert.NoError(t, err) + +// colDefMap, err := parseSDL(schema) +// require.NoError(t, err) + +// johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}, colDefMap["User"].Schema) +// errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ +// {"name": "John"}, +// {"model": "iPhone", "owner_id": johnDocID}, +// {"model": "MacBook", "owner_id": johnDocID}, +// { +// "CPU": "A13", +// "device_id": mustGetDocIDFromDocMap(map[string]any{ +// "model": "iPhone", +// "owner_id": johnDocID, +// }, colDefMap["Device"].Schema), +// }, +// { +// "CPU": "M2", +// "device_id": mustGetDocIDFromDocMap(map[string]any{ +// "model": "MacBook", +// "owner_id": johnDocID, +// }, colDefMap["Device"].Schema), +// }, +// }), docs) +// if errorMsg != "" { +// t.Error(errorMsg) +// } +// } + +// func TestGeneratePredefined_OneToMany(t *testing.T) { +// defs := []client.CollectionDefinition{ +// { +// Description: client.CollectionDescription{ +// Name: "User", +// ID: 0, +// }, +// Schema: client.SchemaDescription{ +// Name: "User", +// Fields: []client.FieldDescription{ +// { +// Name: "name", +// Kind: client.FieldKind_STRING, +// }, +// { +// Name: "devices", +// Kind: client.FieldKind_FOREIGN_OBJECT_ARRAY, +// Schema: "Device", +// RelationType: client.Relation_Type_MANY | client.Relation_Type_ONEMANY, +// }, +// }, +// }, +// }, +// { +// Description: client.CollectionDescription{ +// Name: "Device", +// ID: 1, +// }, +// Schema: client.SchemaDescription{ +// Name: "Device", +// Fields: 
[]client.FieldDescription{ +// { +// Name: "model", +// Kind: client.FieldKind_STRING, +// }, +// { +// Name: "owner", +// Kind: client.FieldKind_FOREIGN_OBJECT, +// Schema: "User", +// RelationType: client.Relation_Type_ONE | +// client.Relation_Type_ONEMANY | +// client.Relation_Type_Primary, +// }, +// }, +// }, +// }, +// } +// docs, err := Create(defs, DocsList{ +// ColName: "User", +// Docs: []map[string]any{ +// { +// "name": "John", +// "devices": []map[string]any{ +// {"model": "iPhone"}, +// {"model": "PlayStation"}, +// }, +// }, +// { +// "name": "Fred", +// "devices": []map[string]any{ +// {"model": "Surface"}, +// {"model": "Pixel"}, +// }, +// }, +// }, +// }) +// assert.NoError(t, err) + +// johnDocID := mustGetDocIDFromDocMap(map[string]any{"name": "John"}, defs[0].Schema) +// fredDocID := mustGetDocIDFromDocMap(map[string]any{"name": "Fred"}, defs[0].Schema) +// errorMsg := assertDocs(mustAddDocIDsToDocs([]map[string]any{ +// {"name": "John"}, +// {"name": "Fred"}, +// {"model": "iPhone", "owner_id": johnDocID}, +// {"model": "PlayStation", "owner_id": johnDocID}, +// {"model": "Surface", "owner_id": fredDocID}, +// {"model": "Pixel", "owner_id": fredDocID}, +// }), docs) +// if errorMsg != "" { +// t.Error(errorMsg) +// } +// } diff --git a/tests/predefined/util_test.go b/tests/predefined/util_test.go index c06e6c0fdc..f155062503 100644 --- a/tests/predefined/util_test.go +++ b/tests/predefined/util_test.go @@ -68,22 +68,22 @@ outer: return "" } -func mustGetDocIDFromDocMap(docMap map[string]any) string { - doc, err := client.NewDocFromMap(docMap) +func mustGetDocIDFromDocMap(docMap map[string]any, sd client.SchemaDescription) string { + doc, err := client.NewDocFromMap(docMap, sd) if err != nil { panic("can not get doc from map" + err.Error()) } return doc.ID().String() } -func mustAddDocIDToDoc(doc map[string]any) map[string]any { - doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc) +func mustAddDocIDToDoc(doc map[string]any, sd client.SchemaDescription) map[string]any { + doc[request.DocIDFieldName] = mustGetDocIDFromDocMap(doc, sd) return doc } -func mustAddDocIDsToDocs(docs []map[string]any) []map[string]any { +func mustAddDocIDsToDocs(docs []map[string]any, sd client.SchemaDescription) []map[string]any { for i := range docs { - mustAddDocIDToDoc(docs[i]) + mustAddDocIDToDoc(docs[i], sd) } return docs } From 9241b4cac0a2a78fb15fe02da130efea3df65b35 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 08:45:27 -0800 Subject: [PATCH 38/60] bot: Update dependencies (bulk dependabot PRs) 08-01-2024 (#2173) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2172 bot: Bump @typescript-eslint/eslint-plugin from 6.16.0 to 6.18.0 in /playground #2170 bot: Bump @types/react from 18.2.46 to 18.2.47 in /playground #2169 bot: Bump graphiql from 3.0.10 to 3.1.0 in /playground #2168 bot: Bump vite from 5.0.10 to 5.0.11 in /playground ⚠️ The following PRs were resolved manually due to merge conflicts: #2171 bot: Bump @typescript-eslint/parser from 6.16.0 to 6.18.0 in /playground --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Shahzad Lone --- playground/package-lock.json | 405 
+++++++++++++++++++++++++++++++---- playground/package.json | 10 +- 2 files changed, 371 insertions(+), 44 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 9a05301ece..48478dacb6 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -8,24 +8,24 @@ "name": "playground", "version": "0.0.0", "dependencies": { - "graphiql": "^3.0.10", + "graphiql": "^3.1.0", "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", "swagger-ui-react": "^5.10.5" }, "devDependencies": { - "@types/react": "^18.2.46", + "@types/react": "^18.2.47", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.16.0", - "@typescript-eslint/parser": "^6.16.0", + "@typescript-eslint/eslint-plugin": "^6.18.0", + "@typescript-eslint/parser": "^6.18.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", - "vite": "^5.0.10" + "vite": "^5.0.11" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -2237,9 +2237,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.46", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.46.tgz", - "integrity": "sha512-nNCvVBcZlvX4NU1nRRNV/mFl1nNRuTuslAJglQsq+8ldXe5Xv0Wd2f7WTE3jOxhLH2BFfiZGC6GCp+kHQbgG+w==", + "version": "18.2.47", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.47.tgz", + "integrity": "sha512-xquNkkOirwyCgoClNk85BjP+aqnIS+ckAJ8i37gAbDs14jfW/J23f2GItAf33oiUPQnqNMALiFeoM9Y5mbjpVQ==", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -2294,16 +2294,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.16.0.tgz", - "integrity": "sha512-O5f7Kv5o4dLWQtPX4ywPPa+v9G+1q1x8mz0Kr0pXUtKsevo+gIJHLkGc8RxaZWtP8RrhwhSNIWThnW42K9/0rQ==", + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.18.0.tgz", + "integrity": "sha512-3lqEvQUdCozi6d1mddWqd+kf8KxmGq2Plzx36BlkjuQe3rSTm/O98cLf0A4uDO+a5N1KD2SeEEl6fW97YHY+6w==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.16.0", - "@typescript-eslint/type-utils": "6.16.0", - "@typescript-eslint/utils": "6.16.0", - "@typescript-eslint/visitor-keys": "6.16.0", + "@typescript-eslint/scope-manager": "6.18.0", + "@typescript-eslint/type-utils": "6.18.0", + "@typescript-eslint/utils": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2328,16 +2328,63 @@ } } }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.0.tgz", + "integrity": "sha512-o/UoDT2NgOJ2VfHpfr+KBY2ErWvCySNUIX/X7O9g8Zzt/tXdpfEU43qbNk8LVuWUT2E0ptzTWXh79i74PP0twA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + 
"node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", + "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", + "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@typescript-eslint/parser": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.16.0.tgz", - "integrity": "sha512-H2GM3eUo12HpKZU9njig3DF5zJ58ja6ahj1GoHEHOgQvYxzoFJJEvC1MQ7T2l9Ha+69ZSOn7RTxOdpC/y3ikMw==", + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.18.0.tgz", + "integrity": "sha512-v6uR68SFvqhNQT41frCMCQpsP+5vySy6IdgjlzUWoo7ALCnpaWYcz/Ij2k4L8cEsL0wkvOviCMpjmtRtHNOKzA==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.16.0", - "@typescript-eslint/types": "6.16.0", - "@typescript-eslint/typescript-estree": "6.16.0", - "@typescript-eslint/visitor-keys": "6.16.0", + "@typescript-eslint/scope-manager": "6.18.0", + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/typescript-estree": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0", "debug": "^4.3.4" }, "engines": { @@ -2356,6 +2403,105 @@ } } }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.0.tgz", + "integrity": "sha512-o/UoDT2NgOJ2VfHpfr+KBY2ErWvCySNUIX/X7O9g8Zzt/tXdpfEU43qbNk8LVuWUT2E0ptzTWXh79i74PP0twA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", + "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", + "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", + "dev": true, + "dependencies": { + 
"@typescript-eslint/types": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", + "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/parser/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@typescript-eslint/scope-manager": { "version": "6.16.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.16.0.tgz", @@ -2374,13 +2520,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.16.0.tgz", - "integrity": "sha512-ThmrEOcARmOnoyQfYkHw/DX2SEYBalVECmoldVuH6qagKROp/jMnfXpAU/pAIWub9c4YTxga+XwgAkoA0pxfmg==", + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.0.tgz", + "integrity": "sha512-ZeMtrXnGmTcHciJN1+u2CigWEEXgy1ufoxtWcHORt5kGvpjjIlK9MUhzHm4RM8iVy6dqSaZA/6PVkX6+r+ChjQ==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.16.0", - "@typescript-eslint/utils": "6.16.0", + "@typescript-eslint/typescript-estree": "6.18.0", + "@typescript-eslint/utils": "6.18.0", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2400,6 +2546,88 @@ } } }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", + "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.18.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", + "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", + "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/type-utils/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@typescript-eslint/types": { "version": "6.16.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.16.0.tgz", @@ -2466,17 +2694,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.16.0.tgz", - "integrity": "sha512-T83QPKrBm6n//q9mv7oiSvy/Xq/7Hyw9SzSEhMHJwznEmQayfBM87+oAlkNAMEO7/MjIwKyOHgBJbxB0s7gx2A==", + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.18.0.tgz", + "integrity": "sha512-wiKKCbUeDPGaYEYQh1S580dGxJ/V9HI7K5sbGAVklyf+o5g3O+adnS4UNJajplF4e7z2q0uVBaTdT/yLb4XAVA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.16.0", - "@typescript-eslint/types": "6.16.0", - "@typescript-eslint/typescript-estree": "6.16.0", + "@typescript-eslint/scope-manager": "6.18.0", + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/typescript-estree": "6.18.0", "semver": "^7.5.4" }, "engines": { @@ -2490,6 +2718,105 @@ "eslint": "^7.0.0 || ^8.0.0" } }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { + "version": "6.18.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.0.tgz", + "integrity": "sha512-o/UoDT2NgOJ2VfHpfr+KBY2ErWvCySNUIX/X7O9g8Zzt/tXdpfEU43qbNk8LVuWUT2E0ptzTWXh79i74PP0twA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", + "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", + "dev": true, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", + "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "@typescript-eslint/visitor-keys": "6.18.0", + "debug": "^4.3.4", + "globby": "^11.1.0", + "is-glob": "^4.0.3", + "minimatch": "9.0.3", + "semver": "^7.5.4", + "ts-api-utils": "^1.0.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { + "version": "6.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", + "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.0", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@typescript-eslint/utils/node_modules/minimatch": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", + "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/@typescript-eslint/visitor-keys": { "version": "6.16.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.16.0.tgz", @@ -3696,9 +4023,9 @@ "dev": true }, "node_modules/graphiql": { - "version": "3.0.10", - "resolved": 
"https://registry.npmjs.org/graphiql/-/graphiql-3.0.10.tgz", - "integrity": "sha512-xgRFCg0mgIyca8keWkmBFA3knh9exDg53SxqFh96ewoMWYLeziqc0xIGFe2L/As8Aw1u5pFZcW913HwX3IXztw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/graphiql/-/graphiql-3.1.0.tgz", + "integrity": "sha512-1l2PecYNvFYYNSYq+4vIJOACXkP60Kod0E0SnKu+2f0Ux/npFNr3TfwJLZs7eKqqSh0KODmorvHi/XBP46Ua7A==", "dependencies": { "@graphiql/react": "^0.20.2", "@graphiql/toolkit": "^0.9.1", @@ -5935,9 +6262,9 @@ "optional": true }, "node_modules/vite": { - "version": "5.0.10", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.10.tgz", - "integrity": "sha512-2P8J7WWgmc355HUMlFrwofacvr98DAjoE52BfdbwQtyLH06XKwaL/FMnmKM2crF0iX4MpmMKoDlNCB1ok7zHCw==", + "version": "5.0.11", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.0.11.tgz", + "integrity": "sha512-XBMnDjZcNAw/G1gEiskiM1v6yzM4GE5aMGvhWTlHAYYhxb7S3/V1s3m2LDHa8Vh6yIWYYB0iJwsEaS523c4oYA==", "dev": true, "dependencies": { "esbuild": "^0.19.3", diff --git a/playground/package.json b/playground/package.json index 85cf771f54..c2d42c05d6 100644 --- a/playground/package.json +++ b/playground/package.json @@ -10,23 +10,23 @@ "preview": "vite preview" }, "dependencies": { - "graphiql": "^3.0.10", + "graphiql": "^3.1.0", "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", "swagger-ui-react": "^5.10.5" }, "devDependencies": { - "@types/react": "^18.2.46", + "@types/react": "^18.2.47", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/parser": "^6.16.0", - "@typescript-eslint/eslint-plugin": "^6.16.0", + "@typescript-eslint/parser": "^6.18.0", + "@typescript-eslint/eslint-plugin": "^6.18.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-refresh": "^0.4.5", "typescript": "^5.3.3", - "vite": "^5.0.10" + "vite": "^5.0.11" } } From f7a4166e234f7b6437aa0787320da269b0f07b60 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Mon, 8 Jan 2024 15:10:36 -0500 Subject: [PATCH 39/60] feat: Add PN Counter CRDT type (#2119) ## Relevant issue(s) Resolves #2115 ## Description This PR adds the PN Counter CRDT type. This type enforces changes that are incrementing or decrementing the value. A value cannot be set directly. The PR also simplifies the CRDT flow a bit further. For example, the `Data` field on the composite CRDT has been removed since the type links to the changed fields. This change will removes the data leak that would be present once we implement field level access control. 
--- cli/utils.go | 3 - client/ctype.go | 42 ++++ client/document.go | 18 +- client/errors.go | 16 +- client/value.go | 83 ++----- config/config.go | 11 +- core/crdt/composite.go | 47 ++-- core/crdt/lwwreg.go | 32 ++- core/crdt/pncounter.go | 195 ++++++++++++++++ core/delta.go | 2 +- db/base/collection_keys.go | 2 +- db/collection.go | 92 ++------ db/collection_delete.go | 1 - db/errors.go | 9 - db/fetcher/indexer_iterators.go | 8 +- db/fetcher/versioned.go | 8 +- db/index.go | 9 +- db/indexed_docs_test.go | 12 +- .../i2115-add-pn-counter-crdt.md | 3 + examples/schema/user.graphql | 2 +- http/handler_collection.go | 32 +-- merkle/clock/clock_test.go | 25 +-- merkle/crdt/composite.go | 16 +- merkle/crdt/errors.go | 35 +++ merkle/crdt/lwwreg.go | 15 +- merkle/crdt/merklecrdt.go | 23 +- merkle/crdt/merklecrdt_test.go | 34 +-- merkle/crdt/pncounter.go | 57 +++++ net/process.go | 6 +- request/graphql/schema/collection.go | 30 ++- tests/gen/cli/util_test.go | 10 +- .../events/simple/with_update_test.go | 4 +- .../mutation/create/crdt/pncounter_test.go | 55 +++++ .../mutation/create/with_version_test.go | 2 +- .../mutation/update/crdt/pncounter_test.go | 211 ++++++++++++++++++ .../state/simple/peer/crdt/pncounter_test.go | 69 ++++++ .../peer_replicator/crdt/pncounter_test.go | 160 +++++++++++++ .../simple/replicator/crdt/pncounter_test.go | 71 ++++++ .../integration/query/commits/simple_test.go | 24 +- .../query/commits/with_cid_test.go | 8 +- .../query/commits/with_depth_test.go | 34 +-- .../query/commits/with_doc_id_cid_test.go | 4 +- .../query/commits/with_doc_id_count_test.go | 6 +- .../query/commits/with_doc_id_field_test.go | 4 +- .../commits/with_doc_id_limit_offset_test.go | 4 +- .../query/commits/with_doc_id_limit_test.go | 4 +- .../with_doc_id_order_limit_offset_test.go | 4 +- .../query/commits/with_doc_id_order_test.go | 74 +++--- .../query/commits/with_doc_id_test.go | 46 ++-- .../commits/with_doc_id_typename_test.go | 6 +- .../query/commits/with_field_test.go | 6 +- .../query/commits/with_group_test.go | 16 +- .../latest_commits/with_doc_id_field_test.go | 8 +- .../query/latest_commits/with_doc_id_test.go | 8 +- .../query/one_to_many/with_cid_doc_id_test.go | 12 +- .../query/simple/with_cid_doc_id_test.go | 134 +++++++++-- .../query/simple/with_version_test.go | 12 +- tests/integration/schema/crdt_type_test.go | 134 +++++++++++ .../updates/add/field/crdt/composite_test.go | 2 +- .../updates/add/field/crdt/invalid_test.go | 2 +- .../add/field/crdt/object_bool_test.go | 2 +- .../updates/add/field/crdt/pncounter_test.go | 73 ++++++ .../one_to_many/with_introspection_test.go | 2 +- 63 files changed, 1608 insertions(+), 471 deletions(-) create mode 100644 core/crdt/pncounter.go create mode 100644 docs/data_format_changes/i2115-add-pn-counter-crdt.md create mode 100644 merkle/crdt/errors.go create mode 100644 merkle/crdt/pncounter.go create mode 100644 tests/integration/mutation/create/crdt/pncounter_test.go create mode 100644 tests/integration/mutation/update/crdt/pncounter_test.go create mode 100644 tests/integration/net/state/simple/peer/crdt/pncounter_test.go create mode 100644 tests/integration/net/state/simple/peer_replicator/crdt/pncounter_test.go create mode 100644 tests/integration/net/state/simple/replicator/crdt/pncounter_test.go create mode 100644 tests/integration/schema/crdt_type_test.go create mode 100644 tests/integration/schema/updates/add/field/crdt/pncounter_test.go diff --git a/cli/utils.go b/cli/utils.go index 8c1a40dc1f..0f3fa0e565 100644 --- a/cli/utils.go +++ 
b/cli/utils.go @@ -99,9 +99,6 @@ func setStoreContext(cmd *cobra.Command, cfg *config.Config) error { // loadConfig loads the rootDir containing the configuration file, // otherwise warn about it and load a default configuration. func loadConfig(cfg *config.Config) error { - if err := cfg.LoadRootDirFromFlagOrDefault(); err != nil { - return err - } return cfg.LoadWithRootdir(cfg.ConfigFileExists()) } diff --git a/client/ctype.go b/client/ctype.go index 96ec19eaf2..7c194c73bf 100644 --- a/client/ctype.go +++ b/client/ctype.go @@ -22,4 +22,46 @@ const ( LWW_REGISTER OBJECT COMPOSITE + PN_COUNTER ) + +// IsSupportedFieldCType returns true if the type is supported as a document field type. +func (t CType) IsSupportedFieldCType() bool { + switch t { + case NONE_CRDT, LWW_REGISTER, PN_COUNTER: + return true + default: + return false + } +} + +// IsCompatibleWith returns true if the CRDT is compatible with the field kind +func (t CType) IsCompatibleWith(kind FieldKind) bool { + switch t { + case PN_COUNTER: + if kind == FieldKind_INT || kind == FieldKind_FLOAT { + return true + } + return false + default: + return true + } +} + +// String returns the string representation of the CRDT. +func (t CType) String() string { + switch t { + case NONE_CRDT: + return "none" + case LWW_REGISTER: + return "lww" + case OBJECT: + return "object" + case COMPOSITE: + return "composite" + case PN_COUNTER: + return "pncounter" + default: + return "unknown" + } +} diff --git a/client/document.go b/client/document.go index 113ddd1b1b..8fb5e28868 100644 --- a/client/document.go +++ b/client/document.go @@ -59,7 +59,7 @@ import ( type Document struct { id DocID fields map[string]Field - values map[Field]Value + values map[Field]*FieldValue head cid.Cid mu sync.RWMutex // marks if document has unsaved changes @@ -71,7 +71,7 @@ type Document struct { func newEmptyDoc(sd SchemaDescription) *Document { return &Document{ fields: make(map[string]Field), - values: make(map[Field]Value), + values: make(map[Field]*FieldValue), schemaDescription: sd, } } @@ -151,7 +151,7 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) { } docs := make([]*Document, len(a)) - for _, v := range a { + for i, v := range a { o, err := v.Object() if err != nil { return nil, err @@ -165,7 +165,7 @@ func NewDocsFromJSON(obj []byte, sd SchemaDescription) ([]*Document, error) { if err != nil { return nil, err } - docs = append(docs, doc) + docs[i] = doc } return docs, nil @@ -421,7 +421,7 @@ func (doc *Document) Get(field string) (any, error) { } // GetValue given a field as a string, return the Value type. 
-func (doc *Document) GetValue(field string) (Value, error) { +func (doc *Document) GetValue(field string) (*FieldValue, error) { doc.mu.RLock() defer doc.mu.RUnlock() path, subPaths, hasSubPaths := parseFieldPath(field) @@ -445,7 +445,7 @@ func (doc *Document) GetValue(field string) (Value, error) { } // GetValueWithField gets the Value type from a given Field type -func (doc *Document) GetValueWithField(f Field) (Value, error) { +func (doc *Document) GetValueWithField(f Field) (*FieldValue, error) { doc.mu.RLock() defer doc.mu.RUnlock() v, exists := doc.values[f] @@ -521,7 +521,7 @@ func (doc *Document) Delete(fields ...string) error { return nil } -func (doc *Document) set(t CType, field string, value Value) error { +func (doc *Document) set(t CType, field string, value *FieldValue) error { doc.mu.Lock() defer doc.mu.Unlock() var f Field @@ -537,7 +537,7 @@ func (doc *Document) set(t CType, field string, value Value) error { } func (doc *Document) setCBOR(t CType, field string, val any) error { - value := newCBORValue(t, val) + value := NewFieldValue(t, val) return doc.set(t, field, value) } @@ -562,7 +562,7 @@ func (doc *Document) Fields() map[string]Field { } // Values gets the document values as a map. -func (doc *Document) Values() map[Field]Value { +func (doc *Document) Values() map[Field]*FieldValue { doc.mu.RLock() defer doc.mu.RUnlock() return doc.values diff --git a/client/errors.go b/client/errors.go index a15e98f8f3..78daf3531b 100644 --- a/client/errors.go +++ b/client/errors.go @@ -24,8 +24,10 @@ const ( errMaxTxnRetries string = "reached maximum transaction reties" errRelationOneSided string = "relation must be defined on both schemas" errCollectionNotFound string = "collection not found" - errUnknownCRDT string = "unknown crdt" errFieldOrAliasToFieldNotExist string = "The given field or alias to field does not exist" + errUnknownCRDT string = "unknown crdt" + errCRDTKindMismatch string = "CRDT type %s can't be assigned to field kind %s" + errInvalidCRDTType string = "CRDT type not supported" ) // Errors returnable from this package. @@ -131,3 +133,15 @@ func NewErrUnknownCRDT(cType CType) error { func NewErrFieldOrAliasToFieldNotExist(name string) error { return errors.New(errFieldOrAliasToFieldNotExist, errors.NewKV("Name", name)) } + +func NewErrInvalidCRDTType(name, crdtType string) error { + return errors.New( + errInvalidCRDTType, + errors.NewKV("Name", name), + errors.NewKV("CRDTType", crdtType), + ) +} + +func NewErrCRDTKindMismatch(cType, kind string) error { + return errors.New(fmt.Sprintf(errCRDTKindMismatch, cType, kind)) +} diff --git a/client/value.go b/client/value.go index 3586e9b03f..ae462b74f2 100644 --- a/client/value.go +++ b/client/value.go @@ -15,119 +15,78 @@ import ( "github.com/sourcenetwork/immutable" ) -// Value is an interface that points to a concrete Value implementation. -// (TODO May collapse this down without an interface) -type Value interface { - Value() any - IsDocument() bool - Type() CType - IsDirty() bool - Clean() - IsDelete() bool //todo: Update IsDelete naming - Delete() -} - -// WriteableValue defines a simple interface with a Bytes() method -// which is used to indicate if a Value is writeable type versus -// a composite type like a Sub-Document. -// Writeable types include simple Strings/Ints/Floats/Binary -// that can be loaded into a CRDT Register, Set, Counter, etc. 
-type WriteableValue interface { - Value - - Bytes() ([]byte, error) -} - -type ReadableValue interface { - Value - - Read() (any, error) -} - -type simpleValue struct { +type FieldValue struct { t CType value any isDirty bool delete bool } -func newValue(t CType, val any) simpleValue { - return simpleValue{ +func NewFieldValue(t CType, val any) *FieldValue { + return &FieldValue{ t: t, value: val, isDirty: true, } } -// func (val simpleValue) Set(val any) - -func (val simpleValue) Value() any { +func (val FieldValue) Value() any { return val.value } -func (val simpleValue) Type() CType { +func (val FieldValue) Type() CType { return val.t } -func (val simpleValue) IsDocument() bool { +func (val FieldValue) IsDocument() bool { _, ok := val.value.(*Document) return ok } // IsDirty returns if the value is marked as dirty (unsaved/changed) -func (val simpleValue) IsDirty() bool { +func (val FieldValue) IsDirty() bool { return val.isDirty } -func (val *simpleValue) Clean() { +func (val *FieldValue) Clean() { val.isDirty = false val.delete = false } -func (val *simpleValue) Delete() { +func (val *FieldValue) Delete() { val.delete = true val.isDirty = true } -func (val simpleValue) IsDelete() bool { +func (val FieldValue) IsDelete() bool { return val.delete } -type cborValue struct { - *simpleValue -} - -// NewCBORValue creates a new CBOR value from a CRDT type and a value. -func NewCBORValue(t CType, val any) WriteableValue { - return newCBORValue(t, val) -} - -func newCBORValue(t CType, val any) WriteableValue { - v := newValue(t, val) - return cborValue{&v} +func (val *FieldValue) SetType(t CType) { + val.t = t } -func (v cborValue) Bytes() ([]byte, error) { +func (val FieldValue) Bytes() ([]byte, error) { em, err := cbor.EncOptions{Time: cbor.TimeRFC3339}.EncMode() if err != nil { return nil, err } - var val any - switch tempVal := v.value.(type) { + var value any + switch tempVal := val.value.(type) { case []immutable.Option[string]: - val = convertImmutable(tempVal) + value = convertImmutable(tempVal) case []immutable.Option[int64]: - val = convertImmutable(tempVal) + value = convertImmutable(tempVal) case []immutable.Option[float64]: - val = convertImmutable(tempVal) + value = convertImmutable(tempVal) case []immutable.Option[bool]: - val = convertImmutable(tempVal) + value = convertImmutable(tempVal) default: - val = v.value + value = val.value } - return em.Marshal(val) + return em.Marshal(value) } func convertImmutable[T any](vals []immutable.Option[T]) []any { diff --git a/config/config.go b/config/config.go index 24d5ee73e7..c179fc5db3 100644 --- a/config/config.go +++ b/config/config.go @@ -101,7 +101,14 @@ func DefaultConfig() *Config { cfg.v.SetConfigName(DefaultConfigFileName) cfg.v.SetConfigType(configType) - // Load default values in viper. + cfg.Persist() + + return cfg +} + +// Persist persists manually set config parameters to the viper config. +func (cfg *Config) Persist() { + // Load new values in viper. b, err := cfg.toBytes() if err != nil { panic(err) @@ -109,8 +116,6 @@ func DefaultConfig() *Config { if err = cfg.v.ReadConfig(bytes.NewReader(b)); err != nil { panic(NewErrReadingConfigFile(err)) } - - return cfg } // LoadWithRootdir loads a Config with parameters from defaults, config file, environment variables, and CLI flags. 
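The `Persist` refactor above is what allows callers to mutate the typed `Config` struct directly and then re-sync viper, a pattern the test helper changes later in this patch depend on. A minimal sketch of the intended call order:

```go
// Mutate the typed config struct first...
cfg := config.DefaultConfig()
cfg.Datastore.Store = "memory"
cfg.API.Address = "127.0.0.1:0"

// ...then push the manually set values back into viper so that
// subsequent config loads and reads observe them.
cfg.Persist()
```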
diff --git a/core/crdt/composite.go b/core/crdt/composite.go index 39c0a3efe4..6900387db3 100644 --- a/core/crdt/composite.go +++ b/core/crdt/composite.go @@ -31,19 +31,20 @@ import ( // CompositeDAGDelta represents a delta-state update made of sub-MerkleCRDTs. type CompositeDAGDelta struct { + DocID []byte + FieldName string + Priority uint64 // SchemaVersionID is the schema version datastore key at the time of commit. // - // It can be used to identify the collection datastructure state at time of commit. + // It can be used to identify the collection datastructure state at the time of commit. SchemaVersionID string - Priority uint64 - Data []byte - DocID []byte - SubDAGs []core.DAGLink // Status represents the status of the document. By default it is `Active`. // Alternatively, if can be set to `Deleted`. Status client.DocumentStatus - - FieldName string + // SubDAGS should not be marshalled as they are already + // stored as links in the DAG blocks. They are needed here to + // hold on to them for the block creation. + SubDAGs []core.DAGLink `json:"-"` } var _ core.CompositeDelta = (*CompositeDAGDelta)(nil) @@ -63,23 +64,18 @@ func (delta *CompositeDAGDelta) Marshal() ([]byte, error) { h := &codec.CborHandle{} buf := bytes.NewBuffer(nil) enc := codec.NewEncoder(buf, h) - err := enc.Encode(struct { - SchemaVersionID string - Priority uint64 - Data []byte - DocID []byte - Status uint8 - FieldName string - }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocID, delta.Status.UInt8(), delta.FieldName}) + err := enc.Encode(delta) if err != nil { return nil, err } return buf.Bytes(), nil } -// Value returns the value of this delta. -func (delta *CompositeDAGDelta) Value() any { - return delta.Data +// Unmarshal decodes the delta from CBOR. +func (delta *CompositeDAGDelta) Unmarshal(b []byte) error { + h := &codec.CborHandle{} + dec := codec.NewDecoderBytes(b, h) + return dec.Decode(delta) } // Links returns the links for this delta. @@ -109,17 +105,16 @@ func (c CompositeDAG) Value(ctx context.Context) ([]byte, error) { } // Set applies a delta to the composite DAG CRDT. 
TBD -func (c CompositeDAG) Set(patch []byte, links []core.DAGLink) *CompositeDAGDelta { +func (c CompositeDAG) Set(links []core.DAGLink) *CompositeDAGDelta { // make sure the links are sorted lexicographically by CID sort.Slice(links, func(i, j int) bool { return strings.Compare(links[i].Cid.String(), links[j].Cid.String()) < 0 }) return &CompositeDAGDelta{ - Data: patch, DocID: []byte(c.key.DocID), - SubDAGs: links, - SchemaVersionID: c.schemaVersionKey.SchemaVersionId, FieldName: c.fieldName, + SchemaVersionID: c.schemaVersionKey.SchemaVersionId, + SubDAGs: links, } } @@ -208,15 +203,13 @@ func (c CompositeDAG) deleteWithPrefix(ctx context.Context, key core.DataStoreKe // a CompositeDAGDelta from a ipld.Node // for now let's do cbor (quick to implement) func (c CompositeDAG) DeltaDecode(node ipld.Node) (core.Delta, error) { - delta := &CompositeDAGDelta{} pbNode, ok := node.(*dag.ProtoNode) if !ok { return nil, client.NewErrUnexpectedType[*dag.ProtoNode]("ipld.Node", node) } - data := pbNode.Data() - h := &codec.CborHandle{} - dec := codec.NewDecoderBytes(data, h) - err := dec.Decode(delta) + + delta := &CompositeDAGDelta{} + err := delta.Unmarshal(pbNode.Data()) if err != nil { return nil, err } diff --git a/core/crdt/lwwreg.go b/core/crdt/lwwreg.go index c256c35cea..937552d868 100644 --- a/core/crdt/lwwreg.go +++ b/core/crdt/lwwreg.go @@ -29,11 +29,14 @@ import ( // LWWRegDelta is a single delta operation for an LWWRegister // @todo: Expand delta metadata (investigate if needed) type LWWRegDelta struct { + DocID []byte + FieldName string + Priority uint64 + // SchemaVersionID is the schema version datastore key at the time of commit. + // + // It can be used to identify the collection datastructure state at the time of commit. SchemaVersionID string - Priority uint64 Data []byte - DocID []byte - FieldName string } var _ core.Delta = (*LWWRegDelta)(nil) @@ -54,21 +57,18 @@ func (delta *LWWRegDelta) Marshal() ([]byte, error) { h := &codec.CborHandle{} buf := bytes.NewBuffer(nil) enc := codec.NewEncoder(buf, h) - err := enc.Encode(struct { - SchemaVersionID string - Priority uint64 - Data []byte - DocID []byte - FieldName string - }{delta.SchemaVersionID, delta.Priority, delta.Data, delta.DocID, delta.FieldName}) + err := enc.Encode(delta) if err != nil { return nil, err } return buf.Bytes(), nil } -func (delta *LWWRegDelta) Value() any { - return delta.Data +// Unmarshal decodes the delta from CBOR. 
+func (delta *LWWRegDelta) Unmarshal(b []byte) error {
+	h := &codec.CborHandle{}
+	dec := codec.NewDecoderBytes(b, h)
+	return dec.Decode(delta)
 }
 
 // LWWRegister, Last-Writer-Wins Register, is a simple CRDT type that allows set/get
@@ -166,15 +166,13 @@ func (reg LWWRegister) setValue(ctx context.Context, val []byte, priority uint64
 // a LWWRegDelta from a ipld.Node
 // for now let's do cbor (quick to implement)
 func (reg LWWRegister) DeltaDecode(node ipld.Node) (core.Delta, error) {
-	delta := &LWWRegDelta{}
 	pbNode, ok := node.(*dag.ProtoNode)
 	if !ok {
 		return nil, client.NewErrUnexpectedType[*dag.ProtoNode]("ipld.Node", node)
 	}
-	data := pbNode.Data()
-	h := &codec.CborHandle{}
-	dec := codec.NewDecoderBytes(data, h)
-	err := dec.Decode(delta)
+
+	delta := &LWWRegDelta{}
+	err := delta.Unmarshal(pbNode.Data())
 	if err != nil {
 		return nil, err
 	}
diff --git a/core/crdt/pncounter.go b/core/crdt/pncounter.go
new file mode 100644
index 0000000000..5d79e24f19
--- /dev/null
+++ b/core/crdt/pncounter.go
@@ -0,0 +1,195 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package crdt
+
+import (
+	"bytes"
+	"context"
+
+	"github.com/fxamacker/cbor/v2"
+	dag "github.com/ipfs/boxo/ipld/merkledag"
+	ds "github.com/ipfs/go-datastore"
+	ipld "github.com/ipfs/go-ipld-format"
+	"github.com/ugorji/go/codec"
+	"golang.org/x/exp/constraints"
+
+	"github.com/sourcenetwork/defradb/client"
+	"github.com/sourcenetwork/defradb/core"
+	"github.com/sourcenetwork/defradb/datastore"
+	"github.com/sourcenetwork/defradb/db/base"
+	"github.com/sourcenetwork/defradb/errors"
+)
+
+var (
+	// ensure types implement core interfaces
+	_ core.ReplicatedData = (*PNCounter[float64])(nil)
+	_ core.ReplicatedData = (*PNCounter[int64])(nil)
+	_ core.Delta = (*PNCounterDelta[float64])(nil)
+	_ core.Delta = (*PNCounterDelta[int64])(nil)
+)
+
+type Incrementable interface {
+	constraints.Integer | constraints.Float
+}
+
+// PNCounterDelta is a single delta operation for a PNCounter
+type PNCounterDelta[T Incrementable] struct {
+	DocID []byte
+	FieldName string
+	Priority uint64
+	// SchemaVersionID is the schema version datastore key at the time of commit.
+	//
+	// It can be used to identify the collection datastructure state at the time of commit.
+	SchemaVersionID string
+	Data T
+}
+
+// GetPriority gets the current priority for this delta.
+func (delta *PNCounterDelta[T]) GetPriority() uint64 {
+	return delta.Priority
+}
+
+// SetPriority will set the priority for this delta.
+func (delta *PNCounterDelta[T]) SetPriority(prio uint64) {
+	delta.Priority = prio
+}
+
+// Marshal encodes the delta using CBOR.
+func (delta *PNCounterDelta[T]) Marshal() ([]byte, error) {
+	h := &codec.CborHandle{}
+	buf := bytes.NewBuffer(nil)
+	enc := codec.NewEncoder(buf, h)
+	err := enc.Encode(delta)
+	if err != nil {
+		return nil, err
+	}
+	return buf.Bytes(), nil
+}
+
+// Unmarshal decodes the delta from CBOR.
+func (delta *PNCounterDelta[T]) Unmarshal(b []byte) error {
+	h := &codec.CborHandle{}
+	dec := codec.NewDecoderBytes(b, h)
+	return dec.Decode(delta)
+}
+
+// PNCounter is a simple CRDT type that allows increment/decrement
+// of Int and Float data types while ensuring convergence.
+type PNCounter[T Incrementable] struct {
+	baseCRDT
+}
+
+// NewPNCounter returns a new instance of the PNCounter with the given ID.
+func NewPNCounter[T Incrementable](
+	store datastore.DSReaderWriter,
+	schemaVersionKey core.CollectionSchemaVersionKey,
+	key core.DataStoreKey,
+	fieldName string,
+) PNCounter[T] {
+	return PNCounter[T]{newBaseCRDT(store, key, schemaVersionKey, fieldName)}
+}
+
+// Value gets the current counter value.
+func (reg PNCounter[T]) Value(ctx context.Context) ([]byte, error) {
+	valueK := reg.key.WithValueFlag()
+	buf, err := reg.store.Get(ctx, valueK.ToDS())
+	if err != nil {
+		return nil, err
+	}
+	return buf, nil
+}
+
+// Increment generates a new delta with the supplied value.
+func (reg PNCounter[T]) Increment(value T) *PNCounterDelta[T] {
+	return &PNCounterDelta[T]{
+		DocID: []byte(reg.key.DocID),
+		FieldName: reg.fieldName,
+		Data: value,
+		SchemaVersionID: reg.schemaVersionKey.SchemaVersionId,
+	}
+}
+
+// Merge implements ReplicatedData interface.
+// It merges two PNCounters by adding the values together.
+func (reg PNCounter[T]) Merge(ctx context.Context, delta core.Delta) error {
+	d, ok := delta.(*PNCounterDelta[T])
+	if !ok {
+		return ErrMismatchedMergeType
+	}
+
+	return reg.incrementValue(ctx, d.Data, d.GetPriority())
+}
+
+func (reg PNCounter[T]) incrementValue(ctx context.Context, value T, priority uint64) error {
+	key := reg.key.WithValueFlag()
+	marker, err := reg.store.Get(ctx, reg.key.ToPrimaryDataStoreKey().ToDS())
+	if err != nil && !errors.Is(err, ds.ErrNotFound) {
+		return err
+	}
+	if bytes.Equal(marker, []byte{base.DeletedObjectMarker}) {
+		key = key.WithDeletedFlag()
+	}
+
+	curValue, err := reg.getCurrentValue(ctx, key)
+	if err != nil {
+		return err
+	}
+
+	newValue := curValue + value
+	b, err := cbor.Marshal(newValue)
+	if err != nil {
+		return err
+	}
+
+	err = reg.store.Put(ctx, key.ToDS(), b)
+	if err != nil {
+		return NewErrFailedToStoreValue(err)
+	}
+
+	return reg.setPriority(ctx, reg.key, priority)
+}
+
+func (reg PNCounter[T]) getCurrentValue(ctx context.Context, key core.DataStoreKey) (T, error) {
+	curValue, err := reg.store.Get(ctx, key.ToDS())
+	if err != nil {
+		if errors.Is(err, ds.ErrNotFound) {
+			return 0, nil
+		}
+		return 0, err
+	}
+
+	return getNumericFromBytes[T](curValue)
+}
+
+// DeltaDecode is a typed helper to extract a PNCounterDelta from a ipld.Node
+func (reg PNCounter[T]) DeltaDecode(node ipld.Node) (core.Delta, error) {
+	pbNode, ok := node.(*dag.ProtoNode)
+	if !ok {
+		return nil, client.NewErrUnexpectedType[*dag.ProtoNode]("ipld.Node", node)
+	}
+
+	delta := &PNCounterDelta[T]{}
+	err := delta.Unmarshal(pbNode.Data())
+	if err != nil {
+		return nil, err
+	}
+
+	return delta, nil
+}
+
+func getNumericFromBytes[T Incrementable](b []byte) (T, error) {
+	var val T
+	err := cbor.Unmarshal(b, &val)
+	if err != nil {
+		return val, err
+	}
+	return val, nil
+}
diff --git a/core/delta.go b/core/delta.go
index cddf51b71c..fda7dd13ae 100644
--- a/core/delta.go
+++ b/core/delta.go
@@ -20,7 +20,7 @@ type Delta interface {
 	GetPriority() uint64
 	SetPriority(uint64)
 	Marshal() ([]byte, error)
-	Value() any
+	Unmarshal(b []byte) error
 }
 
 // CompositeDelta represents a delta-state update to a composite CRDT.
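With `Value() any` replaced by `Unmarshal` on the `Delta` interface, every delta now round-trips through CBOR symmetrically, which is what lets each CRDT's `DeltaDecode` shrink to a few lines. A sketch of the round trip, assuming the `core/crdt` package is imported as `crdt`:

```go
// Encode a counter delta into the bytes that back a DAG block...
in := &crdt.PNCounterDelta[int64]{FieldName: "points", Data: 5}
blockData, err := in.Marshal()
if err != nil {
	panic(err)
}

// ...and decode it back; DocID, FieldName, Priority, and Data are restored.
out := &crdt.PNCounterDelta[int64]{}
if err := out.Unmarshal(blockData); err != nil {
	panic(err)
}
```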
diff --git a/db/base/collection_keys.go b/db/base/collection_keys.go index f32da872fe..b2adc2f9e7 100644 --- a/db/base/collection_keys.go +++ b/db/base/collection_keys.go @@ -45,7 +45,7 @@ func MakePrimaryIndexKeyForCRDT( switch ctype { case client.COMPOSITE: return MakeDataStoreKeyWithCollectionDescription(c).WithInstanceInfo(key).WithFieldId(core.COMPOSITE_NAMESPACE), nil - case client.LWW_REGISTER: + case client.LWW_REGISTER, client.PN_COUNTER: field, ok := c.GetFieldByName(fieldName, &schema) if !ok { return core.DataStoreKey{}, client.NewErrFieldNotExist(fieldName) diff --git a/db/collection.go b/db/collection.go index abc9e767d6..352d3fc8a1 100644 --- a/db/collection.go +++ b/db/collection.go @@ -17,7 +17,6 @@ import ( "strconv" "strings" - "github.com/fxamacker/cbor/v2" "github.com/ipfs/go-cid" ds "github.com/ipfs/go-datastore" "github.com/ipfs/go-datastore/query" @@ -437,8 +436,12 @@ func validateUpdateSchemaFields( return false, NewErrCannotMoveField(proposedField.Name, proposedIndex, existingIndex) } - if proposedField.Typ != client.NONE_CRDT && proposedField.Typ != client.LWW_REGISTER { - return false, NewErrInvalidCRDTType(proposedField.Name, proposedField.Typ) + if !proposedField.Typ.IsSupportedFieldCType() { + return false, client.NewErrInvalidCRDTType(proposedField.Name, proposedField.Typ.String()) + } + + if !proposedField.Typ.IsCompatibleWith(proposedField.Kind) { + return false, client.NewErrCRDTKindMismatch(proposedField.Typ.String(), proposedField.Kind.String()) } newFieldNames[proposedField.Name] = struct{}{} @@ -929,7 +932,6 @@ func (c *collection) save( // => Set/Publish new CRDT values primaryKey := c.getPrimaryKeyFromDocID(doc.ID()) links := make([]core.DAGLink, 0) - docProperties := make(map[string]any) for k, v := range doc.Fields() { val, err := doc.GetValueWithField(v) if err != nil { @@ -948,6 +950,10 @@ func (c *collection) save( return cid.Undef, client.NewErrFieldNotExist(k) } + // by default the type will have been set to LWW_REGISTER. We need to ensure + // that it's set to the same as the field description CRDT type. 
+ val.SetType(fieldDescription.Typ) + relationFieldDescription, isSecondaryRelationID := c.isSecondaryIDField(fieldDescription) if isSecondaryRelationID { primaryId := val.Value().(string) @@ -967,14 +973,21 @@ func (c *collection) save( return cid.Undef, err } - node, _, err := c.saveFieldToMerkleCRDT(ctx, txn, fieldKey, val) + merkleCRDT, err := merklecrdt.InstanceWithStore( + txn, + core.NewCollectionSchemaVersionKey(c.Schema().VersionID, c.ID()), + val.Type(), + fieldDescription.Kind, + fieldKey, + fieldDescription.Name, + ) if err != nil { return cid.Undef, err } - if val.IsDelete() { - docProperties[k] = nil - } else { - docProperties[k] = val.Value() + + node, _, err := merkleCRDT.Save(ctx, val) + if err != nil { + return cid.Undef, err } link := core.DAGLink{ @@ -984,21 +997,11 @@ func (c *collection) save( links = append(links, link) } } - // Update CompositeDAG - em, err := cbor.CanonicalEncOptions().EncMode() - if err != nil { - return cid.Undef, err - } - buf, err := em.Marshal(docProperties) - if err != nil { - return cid.Undef, nil - } headNode, priority, err := c.saveCompositeToMerkleCRDT( ctx, txn, primaryKey.ToDataStoreKey(), - buf, links, client.Active, ) @@ -1172,59 +1175,10 @@ func (c *collection) exists( return true, false, nil } -func (c *collection) saveFieldToMerkleCRDT( - ctx context.Context, - txn datastore.Txn, - dsKey core.DataStoreKey, - val client.Value, -) (ipld.Node, uint64, error) { - switch val.Type() { - case client.LWW_REGISTER: - wval, ok := val.(client.WriteableValue) - if !ok { - return nil, 0, client.ErrValueTypeMismatch - } - var bytes []byte - var err error - if val.IsDelete() { // empty byte array - bytes = []byte{} - } else { - bytes, err = wval.Bytes() - if err != nil { - return nil, 0, err - } - } - - fieldID, err := strconv.Atoi(dsKey.FieldId) - if err != nil { - return nil, 0, err - } - - schema := c.Schema() - - field, ok := c.Description().GetFieldByID(client.FieldID(fieldID), &schema) - if !ok { - return nil, 0, client.NewErrFieldIndexNotExist(fieldID) - } - - merkleCRDT := merklecrdt.NewMerkleLWWRegister( - txn, - core.NewCollectionSchemaVersionKey(schema.VersionID, c.ID()), - dsKey, - field.Name, - ) - - return merkleCRDT.Set(ctx, bytes) - default: - return nil, 0, client.NewErrUnknownCRDT(val.Type()) - } -} - func (c *collection) saveCompositeToMerkleCRDT( ctx context.Context, txn datastore.Txn, dsKey core.DataStoreKey, - buf []byte, links []core.DAGLink, status client.DocumentStatus, ) (ipld.Node, uint64, error) { @@ -1240,7 +1194,7 @@ func (c *collection) saveCompositeToMerkleCRDT( return merkleCRDT.Delete(ctx, links) } - return merkleCRDT.Set(ctx, buf, links) + return merkleCRDT.Save(ctx, links) } // getTxn gets or creates a new transaction from the underlying db. 
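The field-type switch that used to live in `saveFieldToMerkleCRDT` is now centralised in `merklecrdt.InstanceWithStore` (shown later in this patch), so `save` no longer hard-codes LWW. A sketch of the dispatch it performs, with the transaction, keys, and field value assumed to be in scope:

```go
// A pncounter Int field yields a MerklePNCounter[int64]; a pncounter
// Float field yields a MerklePNCounter[float64]; LWW_REGISTER yields
// a MerkleLWWRegister, and so on.
merkleCRDT, err := merklecrdt.InstanceWithStore(
	txn,
	schemaVersionKey,
	client.PN_COUNTER,    // the field's declared CRDT type
	client.FieldKind_INT, // the field kind picks the generic instantiation
	fieldKey,
	"points",
)
if err != nil {
	return err
}

// Save takes a *client.FieldValue and appends a new delta to the field's DAG.
node, priority, err := merkleCRDT.Save(ctx, fieldValue)
```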
diff --git a/db/collection_delete.go b/db/collection_delete.go index 6dbf5dfa5e..f91b8e38f2 100644 --- a/db/collection_delete.go +++ b/db/collection_delete.go @@ -265,7 +265,6 @@ func (c *collection) applyDelete( ctx, txn, dsKey, - []byte{}, dagLinks, client.Deleted, ) diff --git a/db/errors.go b/db/errors.go index db6a139b06..37695097b0 100644 --- a/db/errors.go +++ b/db/errors.go @@ -42,7 +42,6 @@ const ( errDuplicateField string = "duplicate field" errCannotMutateField string = "mutating an existing field is not supported" errCannotMoveField string = "moving fields is not currently supported" - errInvalidCRDTType string = "only default or LWW (last writer wins) CRDT types are supported" errCannotDeleteField string = "deleting an existing field is not supported" errFieldKindNotFound string = "no type found for given name" errFieldKindDoesNotMatchFieldSchema string = "field Kind does not match field Schema" @@ -374,14 +373,6 @@ func NewErrCannotMoveField(name string, proposedIndex, existingIndex int) error ) } -func NewErrInvalidCRDTType(name string, crdtType client.CType) error { - return errors.New( - errInvalidCRDTType, - errors.NewKV("Name", name), - errors.NewKV("CRDTType", crdtType), - ) -} - func NewErrCannotDeleteField(name string, id client.FieldID) error { return errors.New( errCannotDeleteField, diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index 3b2bd1f996..d468d9094f 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ -412,9 +412,9 @@ func createIndexIterator( switch op { case opEq, opGt, opGe, opLt, opLe, opNe: - writableValue := client.NewCBORValue(client.LWW_REGISTER, filterVal) + fieldValue := client.NewFieldValue(client.LWW_REGISTER, filterVal) - valueBytes, err := writableValue.Bytes() + valueBytes, err := fieldValue.Bytes() if err != nil { return nil, err } @@ -490,8 +490,8 @@ func createIndexIterator( } valArr := make([][]byte, 0, len(inArr)) for _, v := range inArr { - writableValue := client.NewCBORValue(client.LWW_REGISTER, v) - valueBytes, err := writableValue.Bytes() + fieldValue := client.NewFieldValue(client.LWW_REGISTER, v) + valueBytes, err := fieldValue.Bytes() if err != nil { return nil, err } diff --git a/db/fetcher/versioned.go b/db/fetcher/versioned.go index fc232bb9c7..c33f1a35da 100644 --- a/db/fetcher/versioned.go +++ b/db/fetcher/versioned.go @@ -339,7 +339,7 @@ func (vf *VersionedFetcher) merge(c cid.Cid) error { } // first arg 0 is the index for the composite DAG in the mCRDTs cache - if err := vf.processNode(0, nd, client.COMPOSITE, ""); err != nil { + if err := vf.processNode(0, nd, client.COMPOSITE, client.FieldKind_None, ""); err != nil { return err } @@ -361,9 +361,7 @@ func (vf *VersionedFetcher) merge(c cid.Cid) error { if !ok { return client.NewErrFieldNotExist(l.Name) } - // @todo: Right now we ONLY handle LWW_REGISTER, need to swith on this and - // get CType from descriptions - if err := vf.processNode(uint32(field.ID), subNd, client.LWW_REGISTER, l.Name); err != nil { + if err := vf.processNode(uint32(field.ID), subNd, field.Typ, field.Kind, l.Name); err != nil { return err } } @@ -375,6 +373,7 @@ func (vf *VersionedFetcher) processNode( crdtIndex uint32, nd format.Node, ctype client.CType, + kind client.FieldKind, fieldName string, ) (err error) { // handle CompositeDAG @@ -388,6 +387,7 @@ func (vf *VersionedFetcher) processNode( vf.store, core.CollectionSchemaVersionKey{}, ctype, + kind, dsKey, fieldName, ) diff --git a/db/index.go b/db/index.go index 
693df4a5f1..aa8a56bcd2 100644
--- a/db/index.go
+++ b/db/index.go
@@ -122,16 +122,15 @@ func (i *collectionBaseIndex) getDocFieldValue(doc *client.Document) ([]byte, er
 	fieldVal, err := doc.GetValue(indexedFieldName)
 	if err != nil {
 		if errors.Is(err, client.ErrFieldNotExist) {
-			return client.NewCBORValue(client.LWW_REGISTER, nil).Bytes()
+			return client.NewFieldValue(client.LWW_REGISTER, nil).Bytes()
 		} else {
 			return nil, err
 		}
 	}
-	writeableVal, ok := fieldVal.(client.WriteableValue)
-	if !ok || !i.validateFieldFunc(fieldVal.Value()) {
-		return nil, NewErrInvalidFieldValue(i.fieldDesc.Kind, writeableVal)
+	if !i.validateFieldFunc(fieldVal.Value()) {
+		return nil, NewErrInvalidFieldValue(i.fieldDesc.Kind, fieldVal)
 	}
-	return writeableVal.Bytes()
+	return fieldVal.Bytes()
 }
 
 func (i *collectionBaseIndex) getDocumentsIndexKey(
diff --git a/db/indexed_docs_test.go b/db/indexed_docs_test.go
index a820b78b30..b7c7abbf9d 100644
--- a/db/indexed_docs_test.go
+++ b/db/indexed_docs_test.go
@@ -159,17 +159,15 @@ func (b *indexKeyBuilder) Build() core.IndexDataStoreKey {
 
 	if b.doc != nil {
 		var fieldBytesVal []byte
-		var writeableVal client.WriteableValue
+		var fieldValue *client.FieldValue
+		var err error
 		if len(b.values) == 0 {
-			fieldVal, err := b.doc.GetValue(b.fieldName)
+			fieldValue, err = b.doc.GetValue(b.fieldName)
 			require.NoError(b.f.t, err)
-			var ok bool
-			writeableVal, ok = fieldVal.(client.WriteableValue)
-			require.True(b.f.t, ok)
 		} else {
-			writeableVal = client.NewCBORValue(client.LWW_REGISTER, b.values[0])
+			fieldValue = client.NewFieldValue(client.LWW_REGISTER, b.values[0])
 		}
-		fieldBytesVal, err = writeableVal.Bytes()
+		fieldBytesVal, err = fieldValue.Bytes()
 		require.NoError(b.f.t, err)
 
 		key.FieldValues = [][]byte{fieldBytesVal}
diff --git a/docs/data_format_changes/i2115-add-pn-counter-crdt.md b/docs/data_format_changes/i2115-add-pn-counter-crdt.md
new file mode 100644
index 0000000000..bbf53af7d8
--- /dev/null
+++ b/docs/data_format_changes/i2115-add-pn-counter-crdt.md
@@ -0,0 +1,3 @@
+# Change CRDT encoded data struct fields
+
+The composite CRDT delta struct no longer hosts the changed properties of the document. This removes the leakage of field level values once we implement field level access control.
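Concretely, the composite delta now carries only document-level metadata plus links, and field values live exclusively in the linked sub-DAG blocks. An illustrative construction, where `docID` and `links` are assumed values:

```go
// No Data field any more: the composite block cannot leak field values,
// and SubDAGs is tagged `json:"-"` so the links are persisted as DAG
// links rather than being marshalled into the block payload.
delta := &crdt.CompositeDAGDelta{
	DocID:     []byte(docID),
	FieldName: "",
	Status:    client.Active,
	SubDAGs:   links,
}
```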
diff --git a/examples/schema/user.graphql b/examples/schema/user.graphql index 13957db4a4..9390a28f64 100644 --- a/examples/schema/user.graphql +++ b/examples/schema/user.graphql @@ -2,5 +2,5 @@ type User { name: String age: Int verified: Boolean - points: Float + points: Int @crdt(type: "pncounter") } diff --git a/http/handler_collection.go b/http/handler_collection.go index d5b4ca04f3..8595fbb267 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -41,35 +41,27 @@ type CollectionUpdateRequest struct { func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { col := req.Context().Value(colContextKey).(client.Collection) - var body any - if err := requestJSON(req, &body); err != nil { + data, err := io.ReadAll(req.Body) + if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } - switch t := body.(type) { - case []any: - var docList []*client.Document - for _, v := range t { - docMap, ok := v.(map[string]any) - if !ok { - responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) - return - } - doc, err := client.NewDocFromMap(docMap, col.Schema()) - if err != nil { - responseJSON(rw, http.StatusBadRequest, errorResponse{err}) - return - } - docList = append(docList, doc) + switch { + case client.IsJSONArray(data): + docList, err := client.NewDocsFromJSON(data, col.Schema()) + if err != nil { + responseJSON(rw, http.StatusBadRequest, errorResponse{err}) + return } + if err := col.CreateMany(req.Context(), docList); err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return } rw.WriteHeader(http.StatusOK) - case map[string]any: - doc, err := client.NewDocFromMap(t, col.Schema()) + default: + doc, err := client.NewDocFromJSON(data, col.Schema()) if err != nil { responseJSON(rw, http.StatusBadRequest, errorResponse{err}) return @@ -79,8 +71,6 @@ func (s *collectionHandler) Create(rw http.ResponseWriter, req *http.Request) { return } rw.WriteHeader(http.StatusOK) - default: - responseJSON(rw, http.StatusBadRequest, errorResponse{ErrInvalidRequestBody}) } } diff --git a/merkle/clock/clock_test.go b/merkle/clock/clock_test.go index 311d990952..763c27ed4f 100644 --- a/merkle/clock/clock_test.go +++ b/merkle/clock/clock_test.go @@ -59,9 +59,8 @@ func TestNewMerkleClock(t *testing.T) { func TestMerkleClockPutBlock(t *testing.T) { ctx := context.Background() clk := newTestMerkleClock() - delta := &crdt.LWWRegDelta{ - Data: []byte("test"), - } + reg := crdt.LWWRegister{} + delta := reg.Set([]byte("test")) node, err := clk.putBlock(ctx, nil, delta) if err != nil { t.Errorf("Failed to putBlock, err: %v", err) @@ -80,9 +79,8 @@ func TestMerkleClockPutBlock(t *testing.T) { func TestMerkleClockPutBlockWithHeads(t *testing.T) { ctx := context.Background() clk := newTestMerkleClock() - delta := &crdt.LWWRegDelta{ - Data: []byte("test"), - } + reg := crdt.LWWRegister{} + delta := reg.Set([]byte("test")) c, err := ccid.NewSHA256CidV1([]byte("Hello World!")) if err != nil { t.Error("Failed to create new head CID:", err) @@ -103,9 +101,8 @@ func TestMerkleClockPutBlockWithHeads(t *testing.T) { func TestMerkleClockAddDAGNode(t *testing.T) { ctx := context.Background() clk := newTestMerkleClock() - delta := &crdt.LWWRegDelta{ - Data: []byte("test"), - } + reg := crdt.LWWRegister{} + delta := reg.Set([]byte("test")) _, err := clk.AddDAGNode(ctx, delta) if err != nil { @@ -117,9 +114,8 @@ func TestMerkleClockAddDAGNode(t *testing.T) { func TestMerkleClockAddDAGNodeWithHeads(t *testing.T) { ctx := 
context.Background() clk := newTestMerkleClock() - delta := &crdt.LWWRegDelta{ - Data: []byte("test1"), - } + reg := crdt.LWWRegister{} + delta := reg.Set([]byte("test")) _, err := clk.AddDAGNode(ctx, delta) if err != nil { @@ -127,9 +123,8 @@ func TestMerkleClockAddDAGNodeWithHeads(t *testing.T) { return } - delta2 := &crdt.LWWRegDelta{ - Data: []byte("test2"), - } + reg2 := crdt.LWWRegister{} + delta2 := reg2.Set([]byte("test2")) _, err = clk.AddDAGNode(ctx, delta2) if err != nil { diff --git a/merkle/crdt/composite.go b/merkle/crdt/composite.go index f837ac3ef7..ee43348bdc 100644 --- a/merkle/crdt/composite.go +++ b/merkle/crdt/composite.go @@ -60,7 +60,7 @@ func (m *MerkleCompositeDAG) Delete( // Set() call on underlying CompositeDAG CRDT // persist/publish delta log.Debug(ctx, "Applying delta-mutator 'Delete' on CompositeDAG") - delta := m.reg.Set([]byte{}, links) + delta := m.reg.Set(links) delta.Status = client.Deleted nd, err := m.clock.AddDAGNode(ctx, delta) if err != nil { @@ -70,16 +70,16 @@ func (m *MerkleCompositeDAG) Delete( return nd, delta.GetPriority(), nil } -// Set sets the values of CompositeDAG. The value is always the object from the mutation operations. -func (m *MerkleCompositeDAG) Set( - ctx context.Context, - patch []byte, - links []core.DAGLink, -) (ipld.Node, uint64, error) { +// Save the value of the composite CRDT to DAG. +func (m *MerkleCompositeDAG) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { + value, ok := data.([]core.DAGLink) + if !ok { + return nil, 0, NewErrUnexpectedValueType(client.COMPOSITE, []core.DAGLink{}, data) + } // Set() call on underlying CompositeDAG CRDT // persist/publish delta log.Debug(ctx, "Applying delta-mutator 'Set' on CompositeDAG") - delta := m.reg.Set(patch, links) + delta := m.reg.Set(value) nd, err := m.clock.AddDAGNode(ctx, delta) if err != nil { return nil, 0, err diff --git a/merkle/crdt/errors.go b/merkle/crdt/errors.go new file mode 100644 index 0000000000..9e828df5dc --- /dev/null +++ b/merkle/crdt/errors.go @@ -0,0 +1,35 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package merklecrdt + +import ( + "fmt" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/errors" +) + +const ( + errUnexpectedValueType = "unexpected value type for merkle CRDT" +) + +var ( + ErrUnexpectedValueType = errors.New(errUnexpectedValueType) +) + +func NewErrUnexpectedValueType(cType client.CType, expected, actual any) error { + return errors.New( + errUnexpectedValueType, + errors.NewKV("CRDT", cType.String()), + errors.NewKV("expected", fmt.Sprintf("%T", expected)), + errors.NewKV("actual", fmt.Sprintf("%T", actual)), + ) +} diff --git a/merkle/crdt/lwwreg.go b/merkle/crdt/lwwreg.go index 8b47492b26..901d458c53 100644 --- a/merkle/crdt/lwwreg.go +++ b/merkle/crdt/lwwreg.go @@ -15,6 +15,7 @@ import ( ipld "github.com/ipfs/go-ipld-format" + "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/core" corecrdt "github.com/sourcenetwork/defradb/core/crdt" "github.com/sourcenetwork/defradb/merkle/clock" @@ -44,11 +45,19 @@ func NewMerkleLWWRegister( } } -// Set the value of the register. 
-func (mlwwreg *MerkleLWWRegister) Set(ctx context.Context, value []byte) (ipld.Node, uint64, error) { +// Save the value of the register to the DAG. +func (mlwwreg *MerkleLWWRegister) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { + value, ok := data.(*client.FieldValue) + if !ok { + return nil, 0, NewErrUnexpectedValueType(client.LWW_REGISTER, &client.FieldValue{}, data) + } + bytes, err := value.Bytes() + if err != nil { + return nil, 0, err + } // Set() call on underlying LWWRegister CRDT // persist/publish delta - delta := mlwwreg.reg.Set(value) + delta := mlwwreg.reg.Set(bytes) nd, err := mlwwreg.clock.AddDAGNode(ctx, delta) return nd, delta.GetPriority(), err } diff --git a/merkle/crdt/merklecrdt.go b/merkle/crdt/merklecrdt.go index 07fb83e436..ba7fd5648d 100644 --- a/merkle/crdt/merklecrdt.go +++ b/merkle/crdt/merklecrdt.go @@ -40,10 +40,9 @@ type Stores interface { type MerkleCRDT interface { core.ReplicatedData Clock() core.MerkleClock + Save(ctx context.Context, data any) (ipld.Node, uint64, error) } -var _ core.ReplicatedData = (*baseMerkleCRDT)(nil) - // baseMerkleCRDT handles the MerkleCRDT overhead functions that aren't CRDT specific like the mutations and state // retrieval functions. It handles creating and publishing the CRDT DAG with the help of the MerkleClock. type baseMerkleCRDT struct { @@ -51,6 +50,8 @@ type baseMerkleCRDT struct { crdt core.ReplicatedData } +var _ core.ReplicatedData = (*baseMerkleCRDT)(nil) + func (base *baseMerkleCRDT) Clock() core.MerkleClock { return base.clock } @@ -71,6 +72,7 @@ func InstanceWithStore( store Stores, schemaVersionKey core.CollectionSchemaVersionKey, ctype client.CType, + kind client.FieldKind, key core.DataStoreKey, fieldName string, ) (MerkleCRDT, error) { @@ -82,6 +84,23 @@ func InstanceWithStore( key, fieldName, ), nil + case client.PN_COUNTER: + switch kind { + case client.FieldKind_INT: + return NewMerklePNCounter[int64]( + store, + schemaVersionKey, + key, + fieldName, + ), nil + case client.FieldKind_FLOAT: + return NewMerklePNCounter[float64]( + store, + schemaVersionKey, + key, + fieldName, + ), nil + } case client.COMPOSITE: return NewMerkleCompositeDAG( store, diff --git a/merkle/crdt/merklecrdt_test.go b/merkle/crdt/merklecrdt_test.go index 47537add09..08bd26dc77 100644 --- a/merkle/crdt/merklecrdt_test.go +++ b/merkle/crdt/merklecrdt_test.go @@ -16,12 +16,10 @@ import ( "github.com/ipfs/go-cid" ds "github.com/ipfs/go-datastore" - "github.com/ipfs/go-datastore/query" "github.com/sourcenetwork/defradb/core" - corecrdt "github.com/sourcenetwork/defradb/core/crdt" + crdt "github.com/sourcenetwork/defradb/core/crdt" "github.com/sourcenetwork/defradb/datastore" - "github.com/sourcenetwork/defradb/logging" "github.com/sourcenetwork/defradb/merkle/clock" ) @@ -33,17 +31,16 @@ func newTestBaseMerkleCRDT() (*baseMerkleCRDT, datastore.DSReaderWriter) { s := newDS() multistore := datastore.MultiStoreFrom(s) - reg := corecrdt.NewLWWRegister(multistore.Datastore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "") + reg := crdt.NewLWWRegister(multistore.Datastore(), core.CollectionSchemaVersionKey{}, core.DataStoreKey{}, "") clk := clock.NewMerkleClock(multistore.Headstore(), multistore.DAGstore(), core.HeadStoreKey{}, reg) return &baseMerkleCRDT{clock: clk, crdt: reg}, multistore.Rootstore() } func TestMerkleCRDTPublish(t *testing.T) { ctx := context.Background() - bCRDT, store := newTestBaseMerkleCRDT() - delta := &corecrdt.LWWRegDelta{ - Data: []byte("test"), - } + bCRDT, _ := 
newTestBaseMerkleCRDT() + reg := crdt.LWWRegister{} + delta := reg.Set([]byte("test")) nd, err := bCRDT.clock.AddDAGNode(ctx, delta) if err != nil { @@ -55,25 +52,4 @@ func TestMerkleCRDTPublish(t *testing.T) { t.Error("Published returned invalid CID Undef:", nd.Cid()) return } - - printStore(ctx, store) -} - -func printStore(ctx context.Context, store datastore.DSReaderWriter) { - q := query.Query{ - Prefix: "", - KeysOnly: false, - } - - results, err := store.Query(ctx, q) - - if err != nil { - panic(err) - } - - defer results.Close() - - for r := range results.Next() { - log.Info(ctx, "", logging.NewKV(r.Key, r.Value)) - } } diff --git a/merkle/crdt/pncounter.go b/merkle/crdt/pncounter.go new file mode 100644 index 0000000000..2cadbee23c --- /dev/null +++ b/merkle/crdt/pncounter.go @@ -0,0 +1,57 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package merklecrdt + +import ( + "context" + + ipld "github.com/ipfs/go-ipld-format" + + "github.com/sourcenetwork/defradb/client" + "github.com/sourcenetwork/defradb/core" + "github.com/sourcenetwork/defradb/core/crdt" + "github.com/sourcenetwork/defradb/merkle/clock" +) + +// MerklePNCounter is a MerkleCRDT implementation of the PNCounter using MerkleClocks. +type MerklePNCounter[T crdt.Incrementable] struct { + *baseMerkleCRDT + + reg crdt.PNCounter[T] +} + +// NewMerklePNCounter creates a new instance (or loaded from DB) of a MerkleCRDT +// backed by a PNCounter CRDT. +func NewMerklePNCounter[T crdt.Incrementable]( + store Stores, + schemaVersionKey core.CollectionSchemaVersionKey, + key core.DataStoreKey, + fieldName string, +) *MerklePNCounter[T] { + register := crdt.NewPNCounter[T](store.Datastore(), schemaVersionKey, key, fieldName) + clk := clock.NewMerkleClock(store.Headstore(), store.DAGstore(), key.ToHeadStoreKey(), register) + base := &baseMerkleCRDT{clock: clk, crdt: register} + return &MerklePNCounter[T]{ + baseMerkleCRDT: base, + reg: register, + } +} + +// Save the value of the PN Counter to the DAG. 
+func (mPNC *MerklePNCounter[T]) Save(ctx context.Context, data any) (ipld.Node, uint64, error) { + value, ok := data.(*client.FieldValue) + if !ok { + return nil, 0, NewErrUnexpectedValueType(client.PN_COUNTER, &client.FieldValue{}, data) + } + delta := mPNC.reg.Increment(value.Value().(T)) + nd, err := mPNC.clock.AddDAGNode(ctx, delta) + return nd, delta.GetPriority(), err +} diff --git a/net/process.go b/net/process.go index 38a5a077fb..4e6ecee19e 100644 --- a/net/process.go +++ b/net/process.go @@ -158,12 +158,14 @@ func initCRDTForType( key = base.MakeDataStoreKeyWithCollectionDescription(description).WithInstanceInfo(dsKey).WithFieldId(fieldID) log.Debug(ctx, "Got CRDT Type", logging.NewKV("CType", ctype), logging.NewKV("Field", field)) - return merklecrdt.NewMerkleLWWRegister( + return merklecrdt.InstanceWithStore( txn, core.NewCollectionSchemaVersionKey(col.Schema().VersionID, col.ID()), + ctype, + fd.Kind, key, field, - ), nil + ) } func decodeBlockBuffer(buf []byte, cid cid.Cid) (ipld.Node, error) { diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index bd0934d437..f386c752ed 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -363,10 +363,15 @@ func fieldsFromAST(field *ast.FieldDefinition, } } + cType, err := setCRDTType(field, kind) + if err != nil { + return nil, err + } + fieldDescription := client.FieldDescription{ Name: field.Name.Value, Kind: kind, - Typ: defaultCRDTForFieldKind[kind], + Typ: cType, Schema: schema, RelationName: relationName, RelationType: relationType, @@ -376,6 +381,29 @@ func fieldsFromAST(field *ast.FieldDefinition, return fieldDescriptions, nil } +func setCRDTType(field *ast.FieldDefinition, kind client.FieldKind) (client.CType, error) { + if directive, exists := findDirective(field, "crdt"); exists { + for _, arg := range directive.Arguments { + switch arg.Name.Value { + case "type": + cType := arg.Value.GetValue().(string) + switch cType { + case client.PN_COUNTER.String(): + if !client.PN_COUNTER.IsCompatibleWith(kind) { + return 0, client.NewErrCRDTKindMismatch(cType, kind.String()) + } + return client.PN_COUNTER, nil + case client.LWW_REGISTER.String(): + return client.LWW_REGISTER, nil + default: + return 0, client.NewErrInvalidCRDTType(field.Name.Value, cType) + } + } + } + } + return defaultCRDTForFieldKind[kind], nil +} + func astTypeToKind(t ast.Type) (client.FieldKind, error) { const ( typeID string = "ID" diff --git a/tests/gen/cli/util_test.go b/tests/gen/cli/util_test.go index 07f027ef7a..32d3c716f6 100644 --- a/tests/gen/cli/util_test.go +++ b/tests/gen/cli/util_test.go @@ -1,4 +1,4 @@ -// Copyright 2022 Democratized Data Foundation +// Copyright 2023 Democratized Data Foundation // // Use of this software is governed by the Business Source License // included in the file licenses/BSL.txt. 
@@ -63,7 +63,7 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { return nil, errors.Wrap("failed to create database", err) } - server, err := httpapi.NewServer(db) + server, err := httpapi.NewServer(db, httpapi.WithAddress(cfg.API.Address)) if err != nil { return nil, errors.Wrap("failed to create http server", err) } @@ -72,6 +72,7 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { } // save the address on the config in case the port number was set to random cfg.API.Address = server.AssignedAddr() + cfg.Persist() // run the server in a separate goroutine go func(apiAddress string) { @@ -92,8 +93,11 @@ func start(ctx context.Context, cfg *config.Config) (*defraInstance, error) { func getTestConfig(t *testing.T) *config.Config { cfg := config.DefaultConfig() cfg.Datastore.Store = "memory" - cfg.Datastore.Badger.Path = t.TempDir() cfg.Net.P2PDisabled = true + cfg.Rootdir = t.TempDir() + cfg.Net.P2PAddress = "/ip4/127.0.0.1/tcp/0" + cfg.API.Address = "127.0.0.1:0" + cfg.Persist() return cfg } diff --git a/tests/integration/events/simple/with_update_test.go b/tests/integration/events/simple/with_update_test.go index 8e91ac231e..c929bf4384 100644 --- a/tests/integration/events/simple/with_update_test.go +++ b/tests/integration/events/simple/with_update_test.go @@ -66,14 +66,14 @@ func TestEventsSimpleWithUpdate(t *testing.T) { ExpectedUpdates: []testUtils.ExpectedUpdate{ { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeicbv34oa4hfcnqbka3jqnby4g75ttlj4wfvc7zhvat5xca45ggq2u"), + Cid: immutable.Some("bafybeif5l2a5f2lcsmuml2cji6unq4qk2ta4f3uow4wccdjebsu7jcjrj4"), }, { DocID: immutable.Some(docID2), }, { DocID: immutable.Some(docID1), - Cid: immutable.Some("bafybeiep6f7sls7z325oqd5oddigxq3fkxwpp5b7um47yz5erxfybjd6ra"), + Cid: immutable.Some("bafybeihchzitl7e7pyhci5bs563dn3seykcleqk56r7vjtslvi3rv3wsne"), }, }, } diff --git a/tests/integration/mutation/create/crdt/pncounter_test.go b/tests/integration/mutation/create/crdt/pncounter_test.go new file mode 100644 index 0000000000..592e01bebb --- /dev/null +++ b/tests/integration/mutation/create/crdt/pncounter_test.go @@ -0,0 +1,55 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package create + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestPNCounterCreate_IntKindWithPositiveValue_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Document creation with PN Counter", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(10), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index 1d98ead005..9749119c60 100644 --- a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -39,7 +39,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { { "_version": []map[string]any{ { - "cid": "bafybeicbv34oa4hfcnqbka3jqnby4g75ttlj4wfvc7zhvat5xca45ggq2u", + "cid": "bafybeif5l2a5f2lcsmuml2cji6unq4qk2ta4f3uow4wccdjebsu7jcjrj4", }, }, }, diff --git a/tests/integration/mutation/update/crdt/pncounter_test.go b/tests/integration/mutation/update/crdt/pncounter_test.go new file mode 100644 index 0000000000..fb5f30613e --- /dev/null +++ b/tests/integration/mutation/update/crdt/pncounter_test.go @@ -0,0 +1,211 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package update + +import ( + "fmt" + "math" + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestPNCounterUpdate_IntKindWithPositiveIncrement_ShouldIncrement(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a PN Counter with Int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a PN Counter with Int type. 
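+// Go's signed 64-bit integers wrap around on overflow (two's complement
+// arithmetic), so incrementing a counter that already holds math.MaxInt64
+// (9223372036854775807) by 1 rolls it over to math.MinInt64
+// (-9223372036854775808) instead of returning an error.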
+func TestPNCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt64(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a PN Counter with Int type causing overflow behaviour", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %d + }`, math.MaxInt64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 1 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(math.MinInt64), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestPNCounterUpdate_FloatKindWithPositiveIncrement_ShouldIncrement(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a PN Counter with Float type. Note the lack of precision", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10.1 + }`, + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 10.2 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + // Note the lack of precision of float types. + "points": 20.299999999999997, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +// This test documents what happens when an overflow occurs in a PN Counter with Float type. +// In this case it is the same as a no-op. +func TestPNCounterUpdate_FloatKindWithPositiveIncrementOverflow_NoOp(t *testing.T) { + test := testUtils.TestCase{ + Description: "Positive increments of a PN Counter with Float type and overflow causing a no-op", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: fmt.Sprintf(`{ + "name": "John", + "points": %g + }`, math.MaxFloat64), + }, + testUtils.UpdateDoc{ + DocID: 0, + Doc: `{ + "points": 1000 + }`, + }, + testUtils.Request{ + Request: `query { + Users { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": math.MaxFloat64, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/peer/crdt/pncounter_test.go b/tests/integration/net/state/simple/peer/crdt/pncounter_test.go new file mode 100644 index 0000000000..f65f4bd6db --- /dev/null +++ b/tests/integration/net/state/simple/peer/crdt/pncounter_test.go @@ -0,0 +1,69 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package peer_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2PUpdate_WithPNCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + // Create Shahzad on all nodes + Doc: `{ + "name": "Shahzad", + "points": 10 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 1, + TargetNodeID: 0, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + DocID: 0, + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/net/state/simple/peer_replicator/crdt/pncounter_test.go b/tests/integration/net/state/simple/peer_replicator/crdt/pncounter_test.go new file mode 100644 index 0000000000..40eba568ba --- /dev/null +++ b/tests/integration/net/state/simple/peer_replicator/crdt/pncounter_test.go @@ -0,0 +1,160 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package peer_replicator_test + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestP2PPeerReplicatorWithCreate_PNCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 0 + }`, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 2, + }, + testUtils.ConnectPeers{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.CreateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "name": "Shahzad", + "points": 3000 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + NodeID: immutable.Some(0), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + { + "points": int64(3000), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(1), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + }, + }, + testUtils.Request{ + NodeID: immutable.Some(2), + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(0), + }, + { + "points": int64(3000), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestP2PPeerReplicatorWithUpdate_PNCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + 
testUtils.CreateDoc{
+				Doc: `{
+					"name": "John",
+					"points": 10
+				}`,
+			},
+			testUtils.ConnectPeers{
+				SourceNodeID: 1,
+				TargetNodeID: 0,
+			},
+			testUtils.ConfigureReplicator{
+				SourceNodeID: 0,
+				TargetNodeID: 2,
+			},
+			testUtils.UpdateDoc{
+				// Update John's points on the first node only, and allow the value to sync
+				NodeID: immutable.Some(0),
+				Doc: `{
+					"points": 10
+				}`,
+			},
+			testUtils.WaitForSync{},
+			testUtils.Request{
+				Request: `query {
+					Users {
+						points
+					}
+				}`,
+				Results: []map[string]any{
+					{
+						"points": int64(20),
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/net/state/simple/replicator/crdt/pncounter_test.go b/tests/integration/net/state/simple/replicator/crdt/pncounter_test.go
new file mode 100644
index 0000000000..186a524e4e
--- /dev/null
+++ b/tests/integration/net/state/simple/replicator/crdt/pncounter_test.go
@@ -0,0 +1,71 @@
+// Copyright 2023 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package replicator
+
+import (
+	"testing"
+
+	"github.com/sourcenetwork/immutable"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestP2POneToOneReplicatorUpdate_PNCounter_NoError(t *testing.T) {
+	test := testUtils.TestCase{
+		Actions: []any{
+			testUtils.RandomNetworkingConfig(),
+			testUtils.RandomNetworkingConfig(),
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Users {
+						name: String
+						points: Int @crdt(type: "pncounter")
+					}
+				`,
+			},
+			testUtils.CreateDoc{
+				// This document is created on the first node before the replicator is set up.
+				// Updates should be synced across nodes.
+ NodeID: immutable.Some(0), + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.ConfigureReplicator{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + // Update John's points on the first node only, and allow the value to sync + NodeID: immutable.Some(0), + Doc: `{ + "points": 10 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + points + } + }`, + Results: []map[string]any{ + { + "points": int64(20), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/commits/simple_test.go b/tests/integration/query/commits/simple_test.go index a0dd120ec4..9592c46d1f 100644 --- a/tests/integration/query/commits/simple_test.go +++ b/tests/integration/query/commits/simple_test.go @@ -36,13 +36,13 @@ func TestQueryCommits(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -79,22 +79,22 @@ func TestQueryCommitsMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeifnoeodhrvpimwnuwcxmz2fxci6cwrw5ck5vo5n6rkkdt47hepyhm", + "cid": "bafybeigdcaas33fnrv7jbigm5a5phxtxl76weuf74kqcrb5udjgttqssju", }, { - "cid": "bafybeihx6t43wc23xzak7raultfzpvnetrsi7vhzglray3r7k4gdksbuk4", + "cid": "bafybeiahfq2ji7uneqfqddeqsvz5t3rdkgo7wpnpswo2jon23kxpgvqdsa", }, { - "cid": "bafybeicvpe4oyfrgcuhf2eqqgp2iwuifgl73d6jo4pdlg3x3vqmnusgxv4", + "cid": "bafybeihhadjgfxsyrlg5gftmi4ikppuhecyeqznjru47l3tup4c6sbzhga", }, { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -125,15 +125,15 @@ func TestQueryCommitsWithSchemaVersionIdField(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, diff --git a/tests/integration/query/commits/with_cid_test.go b/tests/integration/query/commits/with_cid_test.go index d34b5c7f0f..5c2703d41c 100644 --- a/tests/integration/query/commits/with_cid_test.go +++ b/tests/integration/query/commits/with_cid_test.go @@ -38,14 +38,14 @@ func TestQueryCommitsWithCid(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: 
"bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi" + cid: "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -71,14 +71,14 @@ func TestQueryCommitsWithCidForFieldCommit(t *testing.T) { testUtils.Request{ Request: `query { commits( - cid: "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi" + cid: "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, diff --git a/tests/integration/query/commits/with_depth_test.go b/tests/integration/query/commits/with_depth_test.go index 8977a84bbb..a0d2c817f4 100644 --- a/tests/integration/query/commits/with_depth_test.go +++ b/tests/integration/query/commits/with_depth_test.go @@ -36,13 +36,13 @@ func TestQueryCommitsWithDepth1(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -81,16 +81,16 @@ func TestQueryCommitsWithDepth1WithUpdate(t *testing.T) { Results: []map[string]any{ { // "Age" field head - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, }, @@ -137,27 +137,27 @@ func TestQueryCommitsWithDepth2WithUpdate(t *testing.T) { Results: []map[string]any{ { // Composite head - "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", + "cid": "bafybeihvhr7ke7bjgjixce262544tlo7mdlyuswtgl66zsrxcfc5targjy", "height": int64(3), }, { // Composite head -1 - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "height": int64(2), }, { // "Name" field head (unchanged from create) - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { // "Age" field head - "cid": "bafybeieirgdstog2griwuuxgb4c3frgka55yoodjwdznraoieqcxfdijw4", + "cid": "bafybeicacrvck5qf37pk3pdsiavvxy2jk67dbdpww5pvoun2k52lw2ftqi", "height": int64(3), }, { // "Age" field head -1 - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, }, @@ -195,22 +195,22 @@ func TestQueryCommitsWithDepth1AndMultipleDocs(t *testing.T) { }`, Results: []map[string]any{ { - "cid": 
"bafybeiasu5mdp6652oux4avwugv6gbd6ciqqsuj2zjv4ypksmiwndgwkeq", + "cid": "bafybeigvcksw7ck2o7rqfyxncn2h5u6bbwj5ejjfvsihsjibxvrqrxbtui", }, { - "cid": "bafybeia7shc4tpafpzblxqjyxmb7fayegsvaol3p2ucujaawig3wtopibu", + "cid": "bafybeibcdmghhshx4v3xamoktw3n6blv7courh6x2d5cttwuzlodml74ny", }, { - "cid": "bafybeifwn57hy5m5rddplfxdomes34ykck775yvinc522nowspkvawqr6q", + "cid": "bafybeig6rwkq6hlf5rcjq64jodl3gtfv5svnmsjlkwrnmbcjui7t3vy3qi", }, { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_cid_test.go b/tests/integration/query/commits/with_doc_id_cid_test.go index 2003158565..48878ae06c 100644 --- a/tests/integration/query/commits/with_doc_id_cid_test.go +++ b/tests/integration/query/commits/with_doc_id_cid_test.go @@ -104,14 +104,14 @@ func TestQueryCommitsWithDocIDAndCidWithUpdate(t *testing.T) { Request: ` { commits( docID: "bae-f54b9689-e06e-5e3a-89b3-f3aee8e64ca7", - cid: "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju" + cid: "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu" ) { cid } }`, Results: []map[string]any{ { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_count_test.go b/tests/integration/query/commits/with_doc_id_count_test.go index ba5b0eb589..abab180695 100644 --- a/tests/integration/query/commits/with_doc_id_count_test.go +++ b/tests/integration/query/commits/with_doc_id_count_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDAndLinkCount(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "_count": 0, }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "_count": 0, }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "_count": 2, }, }, diff --git a/tests/integration/query/commits/with_doc_id_field_test.go b/tests/integration/query/commits/with_doc_id_field_test.go index 87b6edb06c..3de42d2e42 100644 --- a/tests/integration/query/commits/with_doc_id_field_test.go +++ b/tests/integration/query/commits/with_doc_id_field_test.go @@ -118,7 +118,7 @@ func TestQueryCommitsWithDocIDAndFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, }, }, @@ -150,7 +150,7 @@ func TestQueryCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_limit_offset_test.go index 
e6a622aa3c..e56dfb6d85 100644 --- a/tests/integration/query/commits/with_doc_id_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_offset_test.go @@ -57,10 +57,10 @@ func TestQueryCommitsWithDocIDAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", + "cid": "bafybeihvhr7ke7bjgjixce262544tlo7mdlyuswtgl66zsrxcfc5targjy", }, { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_limit_test.go b/tests/integration/query/commits/with_doc_id_limit_test.go index 23b045b708..7c4a4b2fc8 100644 --- a/tests/integration/query/commits/with_doc_id_limit_test.go +++ b/tests/integration/query/commits/with_doc_id_limit_test.go @@ -50,10 +50,10 @@ func TestQueryCommitsWithDocIDAndLimit(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", + "cid": "bafybeihvhr7ke7bjgjixce262544tlo7mdlyuswtgl66zsrxcfc5targjy", }, { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", }, }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go index 118262d5c9..9312c66928 100644 --- a/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go +++ b/tests/integration/query/commits/with_doc_id_order_limit_offset_test.go @@ -58,11 +58,11 @@ func TestQueryCommitsWithDocIDAndOrderAndLimitAndOffset(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, { - "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", + "cid": "bafybeihvhr7ke7bjgjixce262544tlo7mdlyuswtgl66zsrxcfc5targjy", "height": int64(3), }, }, diff --git a/tests/integration/query/commits/with_doc_id_order_test.go b/tests/integration/query/commits/with_doc_id_order_test.go index 47f0ce3802..f89121e199 100644 --- a/tests/integration/query/commits/with_doc_id_order_test.go +++ b/tests/integration/query/commits/with_doc_id_order_test.go @@ -44,23 +44,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "height": int64(2), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "height": int64(1), }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "height": int64(1), }, }, @@ -99,23 +99,23 @@ func TestQueryCommitsWithDocIDAndOrderHeightAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", 
+ "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "height": int64(1), }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "height": int64(1), }, { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "height": int64(2), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, }, @@ -154,24 +154,24 @@ func TestQueryCommitsWithDocIDAndOrderCidDesc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", - "height": int64(1), + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", + "height": int64(2), }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "height": int64(1), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", - "height": int64(2), + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", + "height": int64(1), }, { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", - "height": int64(2), + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", + "height": int64(1), }, { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", - "height": int64(1), + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", + "height": int64(2), }, }, }, @@ -209,25 +209,25 @@ func TestQueryCommitsWithDocIDAndOrderCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", - "height": int64(1), - }, - { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", - "height": int64(2), + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", + "height": int64(1), }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "height": int64(1), }, + { + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", + "height": int64(2), + }, }, }, }, @@ -278,39 +278,39 @@ func TestQueryCommitsWithDocIDAndOrderAndMultiUpdatesCidAsc(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "height": int64(1), }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "height": int64(1), }, { - "cid": 
"bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "height": int64(2), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, { - "cid": "bafybeifj3dw2wehaabwmrkcmebj3xyyujlp32sycydd3wfjszx3bfxglfu", + "cid": "bafybeihvhr7ke7bjgjixce262544tlo7mdlyuswtgl66zsrxcfc5targjy", "height": int64(3), }, { - "cid": "bafybeieirgdstog2griwuuxgb4c3frgka55yoodjwdznraoieqcxfdijw4", + "cid": "bafybeicacrvck5qf37pk3pdsiavvxy2jk67dbdpww5pvoun2k52lw2ftqi", "height": int64(3), }, { - "cid": "bafybeidoph22zh2c4kh2tx5qbg62nbrulvald6w5hgvp5x5rjurdbz3ibi", + "cid": "bafybeicv72yzbkdmp5r32eesxcna7rqyuhwoovg66kkivclzji3onbwm3a", "height": int64(4), }, { - "cid": "bafybeiacs2yvfbjgk3xfz5zgt43gswo4jhreieenwkb4whpstjas5cpbdy", + "cid": "bafybeicf36fznyghq3spknjabxrp72kf66khrzscco3rnyat3ezaufhon4", "height": int64(4), }, }, diff --git a/tests/integration/query/commits/with_doc_id_test.go b/tests/integration/query/commits/with_doc_id_test.go index b69f278c10..c754a18fde 100644 --- a/tests/integration/query/commits/with_doc_id_test.go +++ b/tests/integration/query/commits/with_doc_id_test.go @@ -62,13 +62,13 @@ func TestQueryCommitsWithDocID(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -102,22 +102,22 @@ func TestQueryCommitsWithDocIDAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "links": []map[string]any{}, }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "links": []map[string]any{}, }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "links": []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "name": "age", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "name": "name", }, }, @@ -158,23 +158,23 @@ func TestQueryCommitsWithDocIDAndUpdate(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "height": int64(2), }, { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "height": int64(1), }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "height": int64(1), }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": 
"bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "height": int64(2), }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "height": int64(1), }, }, @@ -219,44 +219,44 @@ func TestQueryCommitsWithDocIDAndUpdateAndLinks(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "links": []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "name": "_head", }, }, }, { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "links": []map[string]any{}, }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "links": []map[string]any{}, }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", "links": []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "name": "_head", }, { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", "name": "age", }, }, }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "links": []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "name": "age", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "name": "name", }, }, diff --git a/tests/integration/query/commits/with_doc_id_typename_test.go b/tests/integration/query/commits/with_doc_id_typename_test.go index 4c360c297e..8248724cd8 100644 --- a/tests/integration/query/commits/with_doc_id_typename_test.go +++ b/tests/integration/query/commits/with_doc_id_typename_test.go @@ -37,15 +37,15 @@ func TestQueryCommitsWithDocIDWithTypeName(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "__typename": "Commit", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "__typename": "Commit", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "__typename": "Commit", }, }, diff --git a/tests/integration/query/commits/with_field_test.go b/tests/integration/query/commits/with_field_test.go index f8cd4e961f..45b0acf550 100644 --- a/tests/integration/query/commits/with_field_test.go +++ b/tests/integration/query/commits/with_field_test.go @@ -66,7 +66,7 @@ func TestQueryCommitsWithFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, }, }, @@ 
-98,7 +98,7 @@ func TestQueryCommitsWithCompositeFieldId(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -131,7 +131,7 @@ func TestQueryCommitsWithCompositeFieldIdWithReturnedSchemaVersionId(t *testing. }`, Results: []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, diff --git a/tests/integration/query/commits/with_group_test.go b/tests/integration/query/commits/with_group_test.go index 64439c97e1..d7c539a999 100644 --- a/tests/integration/query/commits/with_group_test.go +++ b/tests/integration/query/commits/with_group_test.go @@ -89,10 +89,10 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(2), "_group": []map[string]any{ { - "cid": "bafybeibybndrw4dida2m2mlwfl42i56pwoxna6ztansrvya4ikejd63kju", + "cid": "bafybeihqgrwnhc4w7e5cbhycxvqrpzgi2ei4xrcsre2plceclptgn4tc3i", }, { - "cid": "bafybeieufqlniob4m5abilofa7iewl3mheykvordbhuhi5g4ewszmxnfvi", + "cid": "bafybeibfwqf5szatmlyl3alru4nq3gnxaiyyb3ggqung2jwb4qnm6mejyu", }, }, }, @@ -100,13 +100,13 @@ func TestQueryCommitsWithGroupByHeightWithChild(t *testing.T) { "height": int64(1), "_group": []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", }, }, }, @@ -142,7 +142,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }`, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "_group": []map[string]any{ { "height": int64(1), @@ -150,7 +150,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "_group": []map[string]any{ { "height": int64(1), @@ -158,7 +158,7 @@ func TestQueryCommitsWithGroupByCidWithChild(t *testing.T) { }, }, { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "_group": []map[string]any{ { "height": int64(1), diff --git a/tests/integration/query/latest_commits/with_doc_id_field_test.go b/tests/integration/query/latest_commits/with_doc_id_field_test.go index d320aff1a8..9f3441e52e 100644 --- a/tests/integration/query/latest_commits/with_doc_id_field_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_field_test.go @@ -68,7 +68,7 @@ func TestQueryLatestCommitsWithDocIDAndFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "links": []map[string]any{}, }, }, @@ -101,14 +101,14 @@ func TestQueryLatestCommitsWithDocIDAndCompositeFieldId(t *testing.T) { }, Results: []map[string]any{ { - "cid": 
"bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "links": []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "name": "age", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "name": "name", }, }, diff --git a/tests/integration/query/latest_commits/with_doc_id_test.go b/tests/integration/query/latest_commits/with_doc_id_test.go index 55e0546cdf..4d02a8d96e 100644 --- a/tests/integration/query/latest_commits/with_doc_id_test.go +++ b/tests/integration/query/latest_commits/with_doc_id_test.go @@ -38,14 +38,14 @@ func TestQueryLatestCommitsWithDocID(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "links": []map[string]any{ { - "cid": "bafybeibcr5lkdvcvtr67rpsnvn57hgrhlg36cnmmf7kywjekjodwxytpi4", + "cid": "bafybeihfw5lufgs7ygv45to5rqvt3xkecjgikoccjyx6y2i7lnaclmrcjm", "name": "age", }, { - "cid": "bafybeifw7cu7uweruypv44on2zupjzolyqvyh4ookoeybkztzys67m4hwi", + "cid": "bafybeigmez6gtszsqx6aevzlanvpazhhezw5va4wizhqtqz5k4s2dqjb24", "name": "name", }, }, @@ -75,7 +75,7 @@ func TestQueryLatestCommitsWithDocIDWithSchemaVersionIdField(t *testing.T) { }, Results: []map[string]any{ { - "cid": "bafybeige7qoom3bgjitfisxvhbifou6n4tgguan3ihwbkz5mvbumndeiaa", + "cid": "bafybeiedu23doqe2nagdbmkvfyuouajnfxo7ezy57vbv34dqewhwbfg45u", "schemaVersionId": "bafkreiayhdsgzhmrz6t5d3x2cgqqbdjt7aqgldtlkmxn5eibg542j3n6ea", }, }, diff --git a/tests/integration/query/one_to_many/with_cid_doc_id_test.go b/tests/integration/query/one_to_many/with_cid_doc_id_test.go index f0eb805487..92ec678468 100644 --- a/tests/integration/query/one_to_many/with_cid_doc_id_test.go +++ b/tests/integration/query/one_to_many/with_cid_doc_id_test.go @@ -65,10 +65,10 @@ import ( func TestQueryOneToManyWithCidAndDocID(t *testing.T) { test := testUtils.RequestTestCase{ - Description: "One-to-many relation query from one side with cid and docID", + Description: "One-to-many relation query from one side with cid and docID", Request: `query { Book ( - cid: "bafybeiddywe5odj47ljhyslzey3kbmw3yqdzsstqqjh3ge6cliy2unty64" + cid: "bafybeielrctlwgqx3o5cu3m2636fnfqcizayinyyuemaqhgdgy7ykfhyvi" docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -117,7 +117,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { Description: "One-to-many relation query from one side with child update and parent cid and docID", Request: `query { Book ( - cid: "bafybeiddywe5odj47ljhyslzey3kbmw3yqdzsstqqjh3ge6cliy2unty64", + cid: "bafybeielrctlwgqx3o5cu3m2636fnfqcizayinyyuemaqhgdgy7ykfhyvi", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -129,7 +129,7 @@ func TestQueryOneToManyWithChildUpdateAndFirstCidAndDocID(t *testing.T) { }`, Docs: map[int][]string{ //books - 0: { // bae-fd541c25-229e-5280-b44b-e5c2af3e374d + 0: { // bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d `{ "name": "Painted House", "rating": 4.9, @@ -173,7 +173,7 @@ func TestQueryOneToManyWithParentUpdateAndFirstCidAndDocID(t *testing.T) { Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { Book ( - cid: "bafybeie2okvnf3w3767gspsnln5d6n54hvnmu65wjkadxciopwoi6gxqha", + cid: 
"bafybeiao32zf3tqrtutibbivxhk4fjjhsryb5q4mqyp3gecqp3s5tgegfy", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name @@ -225,7 +225,7 @@ func TestQueryOneToManyWithParentUpdateAndLastCidAndDocID(t *testing.T) { Description: "One-to-many relation query from one side with parent update and parent cid and docID", Request: `query { Book ( - cid: "bafybeie2okvnf3w3767gspsnln5d6n54hvnmu65wjkadxciopwoi6gxqha", + cid: "bafybeiao32zf3tqrtutibbivxhk4fjjhsryb5q4mqyp3gecqp3s5tgegfy", docID: "bae-b9b83269-1f28-5c3b-ae75-3fb4c00d559d" ) { name diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index 1fa00d05d1..71e3f18869 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -73,7 +73,7 @@ func TestQuerySimpleWithCidAndDocID(t *testing.T) { Description: "Simple query with cid and docID", Request: `query { Users ( - cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + cid: "bafybeigwxfw2nfcwelqxzgjsmm5okrt7dctzvzml4tm7i7q7fsdit3ihz4", docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name @@ -102,7 +102,7 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocID(t *testing.T) { Description: "Simple query with (first) cid and docID", Request: `query { Users ( - cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + cid: "bafybeigwxfw2nfcwelqxzgjsmm5okrt7dctzvzml4tm7i7q7fsdit3ihz4", docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name @@ -143,7 +143,7 @@ func TestQuerySimpleWithUpdateAndLastCidAndDocID(t *testing.T) { Description: "Simple query with (last) cid and docID", Request: `query { Users ( - cid: "bafybeibnj6yitgmynodaxnvtl22rhzclhsrc5asmocwyccsbsamobibpsy", + cid: "bafybeigotwnjltl5y5ou5yqxujdayoqet4axspaclbvzustjhinzqx77ym" docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name @@ -184,7 +184,7 @@ func TestQuerySimpleWithUpdateAndMiddleCidAndDocID(t *testing.T) { Description: "Simple query with (middle) cid and docID", Request: `query { Users ( - cid: "bafybeify36bauenmsov4rijdmency367boy234mjezpvg4dj6r47ay3jwq", + cid: "bafybeib4cdjv4dxmayzgf242hx2r3v5tq5ib5z6oyyrzk3dtddt3wsyyhi", docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" ) { Name @@ -224,18 +224,17 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) test := testUtils.RequestTestCase{ Description: "Simple query with (first) cid and docID and yielded schema version", Request: `query { - Users ( - cid: "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", - docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" - ) { - Name - Age - _version { - schemaVersionId - } - } - }`, - + Users ( + cid: "bafybeigwxfw2nfcwelqxzgjsmm5okrt7dctzvzml4tm7i7q7fsdit3ihz4", + docID: "bae-52b9170d-b77a-5887-b877-cbdbb99b009f" + ) { + Name + Age + _version { + schemaVersionId + } + } + }`, Docs: map[int][]string{ 0: { `{ @@ -269,3 +268,106 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) executeTestCase(t, test) } + +func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with second last cid and docID with pncounter int type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": -5 + }`, + }, + 
testUtils.UpdateDoc{ + Doc: `{ + "points": 20 + }`, + }, + testUtils.Request{ + Request: `query { + Users ( + cid: "bafybeiabh6mqnysyrv5phhjikjyl5zgxnpxzxogpip7s7knyujkh7fx3qu", + docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + "points": int64(5), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple query with second last cid and docID with pncounter and float type", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John", + "points": 10.2 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": -5.3 + }`, + }, + testUtils.UpdateDoc{ + Doc: `{ + "points": 20.6 + }`, + }, + testUtils.Request{ + Request: `query { + Users ( + cid: "bafybeiaqw6oxeshkvd3ilzzagjy3c6h776l3hqvmz5loq4sokr7tlxkm5m", + docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" + ) { + name + points + } + }`, + Results: []map[string]any{ + { + "name": "John", + // Note the lack of precision of float types. + "points": 4.8999999999999995, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_version_test.go b/tests/integration/query/simple/with_version_test.go index 2aa571eff7..a40c4d660f 100644 --- a/tests/integration/query/simple/with_version_test.go +++ b/tests/integration/query/simple/with_version_test.go @@ -46,14 +46,14 @@ func TestQuerySimpleWithEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + "cid": "bafybeigwxfw2nfcwelqxzgjsmm5okrt7dctzvzml4tm7i7q7fsdit3ihz4", "links": []map[string]any{ { - "cid": "bafybeigpazmunkmlf5p5jw6fl4supfslupgp2kksvqr7quvhfhsddfa44e", + "cid": "bafybeigcmjyt2ux4mzfckbsz5snkoqrr42vfkesgk7rdw6xzblrowrzfg4", "name": "Age", }, { - "cid": "bafybeibxsjz4krbv3jcbobpdm2igdcvunitu332o6ebsxup53wglkyn6ee", + "cid": "bafybeihkekm4kfn2ttx3wb33l2ps7aductuzd7hrmu6n7zloaicrj5n75u", "name": "Name", }, }, @@ -171,14 +171,14 @@ func TestQuerySimpleWithMultipleAliasedEmbeddedLatestCommit(t *testing.T) { "Age": int64(21), "_version": []map[string]any{ { - "cid": "bafybeiealfslrqsbiwotlducidmesjaemiq2hb7y2bxkcwc7bppuceujui", + "cid": "bafybeigwxfw2nfcwelqxzgjsmm5okrt7dctzvzml4tm7i7q7fsdit3ihz4", "L1": []map[string]any{ { - "cid": "bafybeigpazmunkmlf5p5jw6fl4supfslupgp2kksvqr7quvhfhsddfa44e", + "cid": "bafybeigcmjyt2ux4mzfckbsz5snkoqrr42vfkesgk7rdw6xzblrowrzfg4", "name": "Age", }, { - "cid": "bafybeibxsjz4krbv3jcbobpdm2igdcvunitu332o6ebsxup53wglkyn6ee", + "cid": "bafybeihkekm4kfn2ttx3wb33l2ps7aductuzd7hrmu6n7zloaicrj5n75u", "name": "Name", }, }, diff --git a/tests/integration/schema/crdt_type_test.go b/tests/integration/schema/crdt_type_test.go new file mode 100644 index 0000000000..0df94edcf6 --- /dev/null +++ b/tests/integration/schema/crdt_type_test.go @@ -0,0 +1,134 @@ +// Copyright 2022 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package schema + +import ( + "testing" + + "github.com/sourcenetwork/immutable" + + "github.com/sourcenetwork/defradb/client" + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaCreate_ContainsPNCounterTypeWithIntKind_NoError(t *testing.T) { + schemaVersionID := "bafkreig54q5pw7elljueepsyux4qgdspm3ozct5dqocr5b2kufpjwb2mae" + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.GetSchema{ + VersionID: immutable.Some(schemaVersionID), + ExpectedResults: []client.SchemaDescription{ + { + Name: "Users", + VersionID: schemaVersionID, + Root: schemaVersionID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + { + Name: "points", + ID: 1, + Kind: client.FieldKind_INT, + Typ: client.PN_COUNTER, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPNCounterTypeWithFloatKind_NoError(t *testing.T) { + schemaVersionID := "bafkreibaeypr2i2eg3kozq3mlfsibgtolqlrcozo5ufqfb725dfq3hx43e" + + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Float @crdt(type: "pncounter") + } + `, + }, + testUtils.GetSchema{ + VersionID: immutable.Some(schemaVersionID), + ExpectedResults: []client.SchemaDescription{ + { + Name: "Users", + VersionID: schemaVersionID, + Root: schemaVersionID, + Fields: []client.FieldDescription{ + { + Name: "_docID", + Kind: client.FieldKind_DocID, + }, + { + Name: "points", + ID: 1, + Kind: client.FieldKind_FLOAT, + Typ: client.PN_COUNTER, + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPNCounterTypeWithWrongKind_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: String @crdt(type: "pncounter") + } + `, + ExpectedError: "CRDT type pncounter can't be assigned to field kind String", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaCreate_ContainsPNCounterWithInvalidType_Error(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + points: Int @crdt(type: "invalid") + } + `, + ExpectedError: "CRDT type not supported. Name: points, CRDTType: invalid", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/schema/updates/add/field/crdt/composite_test.go b/tests/integration/schema/updates/add/field/crdt/composite_test.go index e1891f95f7..9648f16564 100644 --- a/tests/integration/schema/updates/add/field/crdt/composite_test.go +++ b/tests/integration/schema/updates/add/field/crdt/composite_test.go @@ -33,7 +33,7 @@ func TestSchemaUpdatesAddFieldCRDTCompositeErrors(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 2, "Typ":3} } ] `, - ExpectedError: "only default or LWW (last writer wins) CRDT types are supported. Name: foo, CRDTType: 3", + ExpectedError: "CRDT type not supported. 
Name: foo, CRDTType: composite", }, }, } diff --git a/tests/integration/schema/updates/add/field/crdt/invalid_test.go b/tests/integration/schema/updates/add/field/crdt/invalid_test.go index dee615dac2..94ba509b5c 100644 --- a/tests/integration/schema/updates/add/field/crdt/invalid_test.go +++ b/tests/integration/schema/updates/add/field/crdt/invalid_test.go @@ -33,7 +33,7 @@ func TestSchemaUpdatesAddFieldCRDTInvalidErrors(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 2, "Typ":99} } ] `, - ExpectedError: "only default or LWW (last writer wins) CRDT types are supported. Name: foo, CRDTType: 99", + ExpectedError: "CRDT type not supported. Name: foo, CRDTType: unknown", }, }, } diff --git a/tests/integration/schema/updates/add/field/crdt/object_bool_test.go b/tests/integration/schema/updates/add/field/crdt/object_bool_test.go index d36af59dc1..5c274d3a6f 100644 --- a/tests/integration/schema/updates/add/field/crdt/object_bool_test.go +++ b/tests/integration/schema/updates/add/field/crdt/object_bool_test.go @@ -33,7 +33,7 @@ func TestSchemaUpdatesAddFieldCRDTObjectWithBoolFieldErrors(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 2, "Typ":2} } ] `, - ExpectedError: "only default or LWW (last writer wins) CRDT types are supported. Name: foo, CRDTType: 2", + ExpectedError: "CRDT type not supported. Name: foo, CRDTType: object", }, }, } diff --git a/tests/integration/schema/updates/add/field/crdt/pncounter_test.go b/tests/integration/schema/updates/add/field/crdt/pncounter_test.go new file mode 100644 index 0000000000..2664118c0f --- /dev/null +++ b/tests/integration/schema/updates/add/field/crdt/pncounter_test.go @@ -0,0 +1,73 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+ +package crdt + +import ( + "testing" + + testUtils "github.com/sourcenetwork/defradb/tests/integration" +) + +func TestSchemaUpdates_AddFieldCRDTPNCounter_NoError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with crdt PN Counter (4)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 4, "Typ": 4} } + ] + `, + }, + testUtils.Request{ + Request: `query { + Users { + name + foo + } + }`, + Results: []map[string]any{}, + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} + +func TestSchemaUpdates_AddFieldCRDTPNCounterWithMismatchKind_Error(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test schema update, add field with crdt PN Counter (4)", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Users { + name: String + } + `, + }, + testUtils.SchemaPatch{ + Patch: ` + [ + { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "foo", "Kind": 2, "Typ": 4} } + ] + `, + ExpectedError: "CRDT type pncounter can't be assigned to field kind Boolean", + }, + }, + } + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_many/with_introspection_test.go b/tests/integration/view/one_to_many/with_introspection_test.go index 284bd059af..895ed03fcb 100644 --- a/tests/integration/view/one_to_many/with_introspection_test.go +++ b/tests/integration/view/one_to_many/with_introspection_test.go @@ -108,7 +108,7 @@ func TestView_OneToMany_GQLIntrospectionTest(t *testing.T) { ExpectedData: map[string]any{ "__type": map[string]any{ "name": "BookView", - // Note: `_key`, `_version`, `_deleted`, etc should not be present, + // Note: `_docID`, `_version`, `_deleted`, etc should not be present, // although aggregates and `_group` should be. // There should also be no `Author` field - the relationship field // should only exist on the parent. From d2cfd876c5fa3f8a6dbc1e663278a9460351238b Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Tue, 9 Jan 2024 10:33:45 +0100 Subject: [PATCH 40/60] fix: Filtering on unique index if there is no match (#2177) ## Relevant issue(s) Resolves #2176 ## Description Make unique index distinguish between key-not-found and other errors. 
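For readers unfamiliar with the pattern, the fix below branches on the datastore's sentinel error with `errors.Is` instead of treating every failed `Get` as fatal: a missing index key simply means "no document matches the filter". A minimal, self-contained sketch of the same pattern using the `go-datastore` sentinel (the `lookup` helper and its signature are illustrative stand-ins, not DefraDB code):

```go
package main

import (
	"context"
	"errors"
	"fmt"

	ds "github.com/ipfs/go-datastore"
)

// lookup reports (value, found, err): a missing key is a normal
// non-matching result, while any other storage failure is surfaced.
func lookup(ctx context.Context, store ds.Read, key ds.Key) ([]byte, bool, error) {
	val, err := store.Get(ctx, key)
	if errors.Is(err, ds.ErrNotFound) {
		return nil, false, nil // key not found: not an error for a filter query
	}
	if err != nil {
		return nil, false, err // genuine failure: propagate to the caller
	}
	return val, true, nil
}

func main() {
	store := ds.NewMapDatastore()
	_, found, err := lookup(context.Background(), store, ds.NewKey("/missing"))
	fmt.Println(found, err) // false <nil>
}
```

As the accompanying integration test shows, the query-level effect is that a unique-index `_eq` filter with no matching document now returns an empty result set rather than an error.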
--- db/fetcher/indexer_iterators.go | 4 +++ ...uery_with_unique_index_only_filter_test.go | 33 +++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/db/fetcher/indexer_iterators.go b/db/fetcher/indexer_iterators.go index d468d9094f..aa24605559 100644 --- a/db/fetcher/indexer_iterators.go +++ b/db/fetcher/indexer_iterators.go @@ -17,6 +17,7 @@ import ( "strings" "github.com/fxamacker/cbor/v2" + ds "github.com/ipfs/go-datastore" "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/connor" @@ -141,6 +142,9 @@ func (i *eqSingleIndexIterator) Next() (indexIterResult, error) { i.indexKey.FieldValues = [][]byte{i.value} val, err := i.store.Get(i.ctx, i.indexKey.ToDS()) if err != nil { + if errors.Is(err, ds.ErrNotFound) { + return indexIterResult{key: i.indexKey}, nil + } return indexIterResult{}, err } i.store = nil diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go index 54ac7b2d8d..11cf0be8a5 100644 --- a/tests/integration/index/query_with_unique_index_only_filter_test.go +++ b/tests/integration/index/query_with_unique_index_only_filter_test.go @@ -461,3 +461,36 @@ func TestQueryWithUniqueIndex_WithNotLikeFilter_ShouldFetch(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestQueryWithUniqueIndex_IfNoMatch_ReturnEmptyResult(t *testing.T) { + test := testUtils.TestCase{ + Description: "If filter does not match any document, return empty result", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` { + "name": "Shahzad", + "age": 23 + }`, + }, + testUtils.Request{ + Request: `query { + User(filter: {age: {_eq: 20}}) { + name + } + }`, + Results: []map[string]any{}, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 4c36d70784b34fc84251f426d690c5d7d8a130e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 11:34:31 -0800 Subject: [PATCH 41/60] bot: Bump github.com/getkin/kin-openapi from 0.120.0 to 0.122.0 (#2097) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/getkin/kin-openapi](https://github.com/getkin/kin-openapi) from 0.120.0 to 0.122.0.
Release notes

Sourced from github.com/getkin/kin-openapi's releases.

v0.122.0 - Full Changelog: https://github.com/getkin/kin-openapi/compare/v0.121.0...v0.122.0

v0.121.0 - Full Changelog: https://github.com/getkin/kin-openapi/compare/v0.120.0...v0.121.0

Commits
  • 6740cd2 openapi3: add support for extensions on the few types left (#763)
  • f6d1b8c fix after #870: make sure Bis does not surface up (#878)
  • 0cc5e22 docs.sh: fix narrow docs checks spectrum (#877)
  • 4e7d031 openapi3: correct implementations of JSONLookup (#876)
  • c1681a9 openapi3: rename type of Components.Responses to ResponseBodies (from Respons...
  • 663b0dd openapi3: refacto ref-resolving end conditions (#874)
  • 377bb40 openapi{2,3}: simplify unmarshal errors (#870)
  • fe1c5f5 close #594: yaml "control characters are not allowed" no longer reproducible ...
  • e7a726a openapi3: support \uC4FE codepoint syntax in Schema.Pattern (#873)
  • 582e6d0 openapi3: handle refs missing fragment (#511)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/getkin/kin-openapi&package-manager=go_modules&previous-version=0.120.0&new-version=0.122.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
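Of note for reviewers: the bulk of this diff is mechanical. kin-openapi v0.121.0 reworked several container types, so `openapi3.Responses` and `openapi3.Paths` are no longer plain maps, and the components-level response map is now `openapi3.ResponseBodies`. A small before/after sketch, assuming kin-openapi v0.122.0's API as exercised by the handler changes below:

```go
package main

import "github.com/getkin/kin-openapi/openapi3"

func main() {
	op := openapi3.NewOperation()
	errorResponse := &openapi3.ResponseRef{
		Value: openapi3.NewResponse().WithDescription("error"),
	}

	// Before (v0.120.0): Responses was a map, written to by key.
	//   op.Responses = make(openapi3.Responses)
	//   op.Responses["400"] = errorResponse

	// After (v0.121.0+): Responses is a struct; use the constructor and Set.
	op.Responses = openapi3.NewResponses()
	op.Responses.Set("400", errorResponse)

	// The old map type lives on as ResponseBodies for spec components,
	// and Paths likewise gained a constructor.
	_ = make(openapi3.ResponseBodies)
	_ = openapi3.NewPaths()
}
```

The same substitution (`make(openapi3.Responses)` to `openapi3.NewResponses()`, index assignment to `.Set(...)`) repeats across every handler file in this PR.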
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Keenan Nemetz --- go.mod | 2 +- go.sum | 4 ++-- http/handler_ccip.go | 4 ++-- http/handler_collection.go | 40 ++++++++++++++++++------------------ http/handler_lens.go | 32 ++++++++++++++--------------- http/handler_p2p.go | 30 +++++++++++++-------------- http/handler_store.go | 42 +++++++++++++++++++------------------- http/handler_tx.go | 16 +++++++-------- http/openapi.go | 4 ++-- 9 files changed, 87 insertions(+), 87 deletions(-) diff --git a/go.mod b/go.mod index 11420aaf2b..d2c1f2aeb4 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ require ( github.com/bxcodec/faker v2.0.1+incompatible github.com/evanphx/json-patch/v5 v5.7.0 github.com/fxamacker/cbor/v2 v2.5.0 - github.com/getkin/kin-openapi v0.120.0 + github.com/getkin/kin-openapi v0.122.0 github.com/go-chi/chi/v5 v5.0.11 github.com/go-chi/cors v1.2.1 github.com/go-errors/errors v1.5.1 diff --git a/go.sum b/go.sum index 52bbcea39e..eca55e42f6 100644 --- a/go.sum +++ b/go.sum @@ -94,8 +94,8 @@ github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nos github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fxamacker/cbor/v2 v2.5.0 h1:oHsG0V/Q6E/wqTS2O1Cozzsy69nqCiguo5Q1a1ADivE= github.com/fxamacker/cbor/v2 v2.5.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo= -github.com/getkin/kin-openapi v0.120.0 h1:MqJcNJFrMDFNc07iwE8iFC5eT2k/NPUFDIpNeiZv8Jg= -github.com/getkin/kin-openapi v0.120.0/go.mod h1:PCWw/lfBrJY4HcdqE3jj+QFkaFK8ABoqo7PvqVhXXqw= +github.com/getkin/kin-openapi v0.122.0 h1:WB9Jbl0Hp/T79/JF9xlSW5Kl9uYdk/AWD0yAd9HOM10= +github.com/getkin/kin-openapi v0.122.0/go.mod h1:PCWw/lfBrJY4HcdqE3jj+QFkaFK8ABoqo7PvqVhXXqw= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-chi/chi/v5 v5.0.11 h1:BnpYbFZ3T3S1WMpD79r7R5ThWX40TaFB7L31Y8xqSwA= diff --git a/http/handler_ccip.go b/http/handler_ccip.go index d2a9ad6783..c0eb6a5918 100644 --- a/http/handler_ccip.go +++ b/http/handler_ccip.go @@ -100,7 +100,7 @@ func (h *ccipHandler) bindRoutes(router *Router) { Value: ccipRequest, } ccipPost.AddResponse(200, ccipResponse) - ccipPost.Responses["400"] = errorResponse + ccipPost.Responses.Set("400", errorResponse) dataPathParam := openapi3.NewPathParameter("data"). WithDescription("Hex encoded request data"). 
@@ -117,7 +117,7 @@ func (h *ccipHandler) bindRoutes(router *Router) { ccipGet.AddParameter(dataPathParam) ccipGet.AddParameter(senderPathParam) ccipGet.AddResponse(200, ccipResponse) - ccipGet.Responses["400"] = errorResponse + ccipGet.Responses.Set("400", errorResponse) router.AddRoute("/ccip/{sender}/{data}", http.MethodGet, ccipGet, h.ExecCCIP) router.AddRoute("/ccip", http.MethodPost, ccipPost, h.ExecCCIP) diff --git a/http/handler_collection.go b/http/handler_collection.go index 8595fbb267..d713afdf40 100644 --- a/http/handler_collection.go +++ b/http/handler_collection.go @@ -379,9 +379,9 @@ func (h *collectionHandler) bindRoutes(router *Router) { collectionCreate.RequestBody = &openapi3.RequestBodyRef{ Value: collectionCreateRequest, } - collectionCreate.Responses = make(openapi3.Responses) - collectionCreate.Responses["200"] = successResponse - collectionCreate.Responses["400"] = errorResponse + collectionCreate.Responses = openapi3.NewResponses() + collectionCreate.Responses.Set("200", successResponse) + collectionCreate.Responses.Set("400", errorResponse) collectionUpdateWithRequest := openapi3.NewRequestBody(). WithRequired(true). @@ -400,7 +400,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { Value: collectionUpdateWithRequest, } collectionUpdateWith.AddResponse(200, collectionUpdateWithResponse) - collectionUpdateWith.Responses["400"] = errorResponse + collectionUpdateWith.Responses.Set("400", errorResponse) collectionDeleteWithRequest := openapi3.NewRequestBody(). WithRequired(true). @@ -419,7 +419,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { Value: collectionDeleteWithRequest, } collectionDeleteWith.AddResponse(200, collectionDeleteWithResponse) - collectionDeleteWith.Responses["400"] = errorResponse + collectionDeleteWith.Responses.Set("400", errorResponse) createIndexRequest := openapi3.NewRequestBody(). WithRequired(true). @@ -437,7 +437,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { Value: createIndexRequest, } createIndex.AddResponse(200, createIndexResponse) - createIndex.Responses["400"] = errorResponse + createIndex.Responses.Set("400", errorResponse) indexArraySchema := openapi3.NewArraySchema() indexArraySchema.Items = indexSchema @@ -452,7 +452,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { getIndexes.Tags = []string{"index"} getIndexes.AddParameter(collectionNamePathParam) getIndexes.AddResponse(200, getIndexesResponse) - getIndexes.Responses["400"] = errorResponse + getIndexes.Responses.Set("400", errorResponse) indexPathParam := openapi3.NewPathParameter("index"). WithRequired(true). @@ -464,9 +464,9 @@ func (h *collectionHandler) bindRoutes(router *Router) { dropIndex.Tags = []string{"index"} dropIndex.AddParameter(collectionNamePathParam) dropIndex.AddParameter(indexPathParam) - dropIndex.Responses = make(openapi3.Responses) - dropIndex.Responses["200"] = successResponse - dropIndex.Responses["400"] = errorResponse + dropIndex.Responses = openapi3.NewResponses() + dropIndex.Responses.Set("200", successResponse) + dropIndex.Responses.Set("400", errorResponse) documentIDPathParam := openapi3.NewPathParameter("docID"). WithRequired(true). 
@@ -483,7 +483,7 @@ func (h *collectionHandler) bindRoutes(router *Router) { collectionGet.AddParameter(collectionNamePathParam) collectionGet.AddParameter(documentIDPathParam) collectionGet.AddResponse(200, collectionGetResponse) - collectionGet.Responses["400"] = errorResponse + collectionGet.Responses.Set("400", errorResponse) collectionUpdate := openapi3.NewOperation() collectionUpdate.Description = "Update a document by docID" @@ -491,9 +491,9 @@ func (h *collectionHandler) bindRoutes(router *Router) { collectionUpdate.Tags = []string{"collection"} collectionUpdate.AddParameter(collectionNamePathParam) collectionUpdate.AddParameter(documentIDPathParam) - collectionUpdate.Responses = make(openapi3.Responses) - collectionUpdate.Responses["200"] = successResponse - collectionUpdate.Responses["400"] = errorResponse + collectionUpdate.Responses = openapi3.NewResponses() + collectionUpdate.Responses.Set("200", successResponse) + collectionUpdate.Responses.Set("400", errorResponse) collectionDelete := openapi3.NewOperation() collectionDelete.Description = "Delete a document by docID" @@ -501,18 +501,18 @@ func (h *collectionHandler) bindRoutes(router *Router) { collectionDelete.Tags = []string{"collection"} collectionDelete.AddParameter(collectionNamePathParam) collectionDelete.AddParameter(documentIDPathParam) - collectionDelete.Responses = make(openapi3.Responses) - collectionDelete.Responses["200"] = successResponse - collectionDelete.Responses["400"] = errorResponse + collectionDelete.Responses = openapi3.NewResponses() + collectionDelete.Responses.Set("200", successResponse) + collectionDelete.Responses.Set("400", errorResponse) collectionKeys := openapi3.NewOperation() collectionKeys.AddParameter(collectionNamePathParam) collectionKeys.Description = "Get all document IDs" collectionKeys.OperationID = "collection_keys" collectionKeys.Tags = []string{"collection"} - collectionKeys.Responses = make(openapi3.Responses) - collectionKeys.Responses["200"] = successResponse - collectionKeys.Responses["400"] = errorResponse + collectionKeys.Responses = openapi3.NewResponses() + collectionKeys.Responses.Set("200", successResponse) + collectionKeys.Responses.Set("400", errorResponse) router.AddRoute("/collections/{name}", http.MethodGet, collectionKeys, h.GetAllDocIDs) router.AddRoute("/collections/{name}", http.MethodPost, collectionCreate, h.Create) diff --git a/http/handler_lens.go b/http/handler_lens.go index a06a4d09f1..5d0838b76a 100644 --- a/http/handler_lens.go +++ b/http/handler_lens.go @@ -147,7 +147,7 @@ func (h *lensHandler) bindRoutes(router *Router) { lensConfig.Description = "List lens migrations" lensConfig.Tags = []string{"lens"} lensConfig.AddResponse(200, lensConfigResponse) - lensConfig.Responses["400"] = errorResponse + lensConfig.Responses.Set("400", errorResponse) setMigrationRequest := openapi3.NewRequestBody(). WithRequired(true). 
@@ -160,17 +160,17 @@ func (h *lensHandler) bindRoutes(router *Router) { setMigration.RequestBody = &openapi3.RequestBodyRef{ Value: setMigrationRequest, } - setMigration.Responses = make(openapi3.Responses) - setMigration.Responses["200"] = successResponse - setMigration.Responses["400"] = errorResponse + setMigration.Responses = openapi3.NewResponses() + setMigration.Responses.Set("200", successResponse) + setMigration.Responses.Set("400", errorResponse) reloadLenses := openapi3.NewOperation() reloadLenses.OperationID = "lens_reload" reloadLenses.Description = "Reload lens migrations" reloadLenses.Tags = []string{"lens"} - reloadLenses.Responses = make(openapi3.Responses) - reloadLenses.Responses["200"] = successResponse - reloadLenses.Responses["400"] = errorResponse + reloadLenses.Responses = openapi3.NewResponses() + reloadLenses.Responses.Set("200", successResponse) + reloadLenses.Responses.Set("400", errorResponse) versionPathParam := openapi3.NewPathParameter("version"). WithRequired(true). @@ -181,9 +181,9 @@ func (h *lensHandler) bindRoutes(router *Router) { hasMigration.Description = "Check if a migration exists" hasMigration.Tags = []string{"lens"} hasMigration.AddParameter(versionPathParam) - hasMigration.Responses = make(openapi3.Responses) - hasMigration.Responses["200"] = successResponse - hasMigration.Responses["400"] = errorResponse + hasMigration.Responses = openapi3.NewResponses() + hasMigration.Responses.Set("200", successResponse) + hasMigration.Responses.Set("400", errorResponse) migrateSchema := openapi3.NewArraySchema() migrateSchema.Items = documentSchema @@ -199,9 +199,9 @@ func (h *lensHandler) bindRoutes(router *Router) { Value: migrateRequest, } migrateUp.AddParameter(versionPathParam) - migrateUp.Responses = make(openapi3.Responses) - migrateUp.Responses["200"] = successResponse - migrateUp.Responses["400"] = errorResponse + migrateUp.Responses = openapi3.NewResponses() + migrateUp.Responses.Set("200", successResponse) + migrateUp.Responses.Set("400", errorResponse) migrateDown := openapi3.NewOperation() migrateDown.OperationID = "lens_migrate_down" @@ -211,9 +211,9 @@ func (h *lensHandler) bindRoutes(router *Router) { Value: migrateRequest, } migrateDown.AddParameter(versionPathParam) - migrateDown.Responses = make(openapi3.Responses) - migrateDown.Responses["200"] = successResponse - migrateDown.Responses["400"] = errorResponse + migrateDown.Responses = openapi3.NewResponses() + migrateDown.Responses.Set("200", successResponse) + migrateDown.Responses.Set("400", errorResponse) router.AddRoute("/lens", http.MethodGet, lensConfig, h.Config) router.AddRoute("/lens", http.MethodPost, setMigration, h.SetMigration) diff --git a/http/handler_p2p.go b/http/handler_p2p.go index 73727ec297..13fc88a90c 100644 --- a/http/handler_p2p.go +++ b/http/handler_p2p.go @@ -161,7 +161,7 @@ func (h *p2pHandler) bindRoutes(router *Router) { peerInfo.OperationID = "peer_info" peerInfo.Tags = []string{"p2p"} peerInfo.AddResponse(200, peerInfoResponse) - peerInfo.Responses["400"] = errorResponse + peerInfo.Responses.Set("400", errorResponse) getReplicatorsSchema := openapi3.NewArraySchema() getReplicatorsSchema.Items = replicatorSchema @@ -174,7 +174,7 @@ func (h *p2pHandler) bindRoutes(router *Router) { getReplicators.OperationID = "peer_replicator_list" getReplicators.Tags = []string{"p2p"} getReplicators.AddResponse(200, getReplicatorsResponse) - getReplicators.Responses["400"] = errorResponse + getReplicators.Responses.Set("400", errorResponse) replicatorRequest := 
openapi3.NewRequestBody(). WithRequired(true). @@ -187,9 +187,9 @@ func (h *p2pHandler) bindRoutes(router *Router) { setReplicator.RequestBody = &openapi3.RequestBodyRef{ Value: replicatorRequest, } - setReplicator.Responses = make(openapi3.Responses) - setReplicator.Responses["200"] = successResponse - setReplicator.Responses["400"] = errorResponse + setReplicator.Responses = openapi3.NewResponses() + setReplicator.Responses.Set("200", successResponse) + setReplicator.Responses.Set("400", errorResponse) deleteReplicator := openapi3.NewOperation() deleteReplicator.Description = "Delete peer replicators" @@ -198,9 +198,9 @@ func (h *p2pHandler) bindRoutes(router *Router) { deleteReplicator.RequestBody = &openapi3.RequestBodyRef{ Value: replicatorRequest, } - deleteReplicator.Responses = make(openapi3.Responses) - deleteReplicator.Responses["200"] = successResponse - deleteReplicator.Responses["400"] = errorResponse + deleteReplicator.Responses = openapi3.NewResponses() + deleteReplicator.Responses.Set("200", successResponse) + deleteReplicator.Responses.Set("400", errorResponse) peerCollectionsSchema := openapi3.NewArraySchema(). WithItems(openapi3.NewStringSchema()) @@ -218,7 +218,7 @@ func (h *p2pHandler) bindRoutes(router *Router) { getPeerCollections.OperationID = "peer_collection_list" getPeerCollections.Tags = []string{"p2p"} getPeerCollections.AddResponse(200, getPeerCollectionsResponse) - getPeerCollections.Responses["400"] = errorResponse + getPeerCollections.Responses.Set("400", errorResponse) addPeerCollections := openapi3.NewOperation() addPeerCollections.Description = "Add peer collections" @@ -227,9 +227,9 @@ func (h *p2pHandler) bindRoutes(router *Router) { addPeerCollections.RequestBody = &openapi3.RequestBodyRef{ Value: peerCollectionRequest, } - addPeerCollections.Responses = make(openapi3.Responses) - addPeerCollections.Responses["200"] = successResponse - addPeerCollections.Responses["400"] = errorResponse + addPeerCollections.Responses = openapi3.NewResponses() + addPeerCollections.Responses.Set("200", successResponse) + addPeerCollections.Responses.Set("400", errorResponse) removePeerCollections := openapi3.NewOperation() removePeerCollections.Description = "Remove peer collections" @@ -238,9 +238,9 @@ func (h *p2pHandler) bindRoutes(router *Router) { removePeerCollections.RequestBody = &openapi3.RequestBodyRef{ Value: peerCollectionRequest, } - removePeerCollections.Responses = make(openapi3.Responses) - removePeerCollections.Responses["200"] = successResponse - removePeerCollections.Responses["400"] = errorResponse + removePeerCollections.Responses = openapi3.NewResponses() + removePeerCollections.Responses.Set("200", successResponse) + removePeerCollections.Responses.Set("400", errorResponse) router.AddRoute("/p2p/info", http.MethodGet, peerInfo, h.PeerInfo) router.AddRoute("/p2p/replicators", http.MethodGet, getReplicators, h.GetAllReplicators) diff --git a/http/handler_store.go b/http/handler_store.go index 0e9f0c2ed2..2a1ff97531 100644 --- a/http/handler_store.go +++ b/http/handler_store.go @@ -385,7 +385,7 @@ func (h *storeHandler) bindRoutes(router *Router) { Value: addSchemaRequest, } addSchema.AddResponse(200, addSchemaResponse) - addSchema.Responses["400"] = errorResponse + addSchema.Responses.Set("400", errorResponse) patchSchemaRequest := openapi3.NewRequestBody(). 
WithJSONSchemaRef(patchSchemaRequestSchema) @@ -397,9 +397,9 @@ func (h *storeHandler) bindRoutes(router *Router) { patchSchema.RequestBody = &openapi3.RequestBodyRef{ Value: patchSchemaRequest, } - patchSchema.Responses = make(openapi3.Responses) - patchSchema.Responses["200"] = successResponse - patchSchema.Responses["400"] = errorResponse + patchSchema.Responses = openapi3.NewResponses() + patchSchema.Responses.Set("200", successResponse) + patchSchema.Responses.Set("400", errorResponse) setDefaultSchemaVersionRequest := openapi3.NewRequestBody(). WithContent(openapi3.NewContentWithSchema(openapi3.NewStringSchema(), []string{"text/plain"})) @@ -411,9 +411,9 @@ func (h *storeHandler) bindRoutes(router *Router) { setDefaultSchemaVersion.RequestBody = &openapi3.RequestBodyRef{ Value: setDefaultSchemaVersionRequest, } - setDefaultSchemaVersion.Responses = make(openapi3.Responses) - setDefaultSchemaVersion.Responses["200"] = successResponse - setDefaultSchemaVersion.Responses["400"] = errorResponse + setDefaultSchemaVersion.Responses = openapi3.NewResponses() + setDefaultSchemaVersion.Responses.Set("200", successResponse) + setDefaultSchemaVersion.Responses.Set("400", errorResponse) backupRequest := openapi3.NewRequestBody(). WithRequired(true). @@ -423,9 +423,9 @@ func (h *storeHandler) bindRoutes(router *Router) { backupExport.OperationID = "backup_export" backupExport.Description = "Export a database backup to file" backupExport.Tags = []string{"backup"} - backupExport.Responses = make(openapi3.Responses) - backupExport.Responses["200"] = successResponse - backupExport.Responses["400"] = errorResponse + backupExport.Responses = openapi3.NewResponses() + backupExport.Responses.Set("200", successResponse) + backupExport.Responses.Set("400", errorResponse) backupExport.RequestBody = &openapi3.RequestBodyRef{ Value: backupRequest, } @@ -434,9 +434,9 @@ func (h *storeHandler) bindRoutes(router *Router) { backupImport.OperationID = "backup_import" backupImport.Description = "Import a database backup from file" backupImport.Tags = []string{"backup"} - backupImport.Responses = make(openapi3.Responses) - backupImport.Responses["200"] = successResponse - backupImport.Responses["400"] = errorResponse + backupImport.Responses = openapi3.NewResponses() + backupImport.Responses.Set("200", successResponse) + backupImport.Responses.Set("400", errorResponse) backupImport.RequestBody = &openapi3.RequestBodyRef{ Value: backupRequest, } @@ -472,7 +472,7 @@ func (h *storeHandler) bindRoutes(router *Router) { collectionDescribe.AddParameter(collectionSchemaRootQueryParam) collectionDescribe.AddParameter(collectionVersionIdQueryParam) collectionDescribe.AddResponse(200, collectionsResponse) - collectionDescribe.Responses["400"] = errorResponse + collectionDescribe.Responses.Set("400", errorResponse) collectionDefintionsSchema := openapi3.NewArraySchema() collectionDefintionsSchema.Items = collectionDefinitionSchema @@ -499,7 +499,7 @@ func (h *storeHandler) bindRoutes(router *Router) { Value: addViewRequest, } views.AddResponse(200, addViewResponse) - views.Responses["400"] = errorResponse + views.Responses.Set("400", errorResponse) schemaNameQueryParam := openapi3.NewQueryParameter("name"). WithDescription("Schema name"). 
@@ -532,7 +532,7 @@ func (h *storeHandler) bindRoutes(router *Router) { schemaDescribe.AddParameter(schemaSchemaRootQueryParam) schemaDescribe.AddParameter(schemaVersionIDQueryParam) schemaDescribe.AddResponse(200, schemaResponse) - schemaDescribe.Responses["400"] = errorResponse + schemaDescribe.Responses.Set("400", errorResponse) graphQLRequest := openapi3.NewRequestBody(). WithContent(openapi3.NewContentWithJSONSchemaRef(graphQLRequestSchema)) @@ -549,7 +549,7 @@ func (h *storeHandler) bindRoutes(router *Router) { Value: graphQLRequest, } graphQLPost.AddResponse(200, graphQLResponse) - graphQLPost.Responses["400"] = errorResponse + graphQLPost.Responses.Set("400", errorResponse) graphQLQueryParam := openapi3.NewQueryParameter("query"). WithSchema(openapi3.NewStringSchema()) @@ -560,15 +560,15 @@ func (h *storeHandler) bindRoutes(router *Router) { graphQLGet.Tags = []string{"graphql"} graphQLGet.AddParameter(graphQLQueryParam) graphQLGet.AddResponse(200, graphQLResponse) - graphQLGet.Responses["400"] = errorResponse + graphQLGet.Responses.Set("400", errorResponse) debugDump := openapi3.NewOperation() debugDump.Description = "Dump database" debugDump.OperationID = "debug_dump" debugDump.Tags = []string{"debug"} - debugDump.Responses = make(openapi3.Responses) - debugDump.Responses["200"] = successResponse - debugDump.Responses["400"] = errorResponse + debugDump.Responses = openapi3.NewResponses() + debugDump.Responses.Set("200", successResponse) + debugDump.Responses.Set("400", errorResponse) router.AddRoute("/backup/export", http.MethodPost, backupExport, h.BasicExport) router.AddRoute("/backup/import", http.MethodPost, backupImport, h.BasicImport) diff --git a/http/handler_tx.go b/http/handler_tx.go index 6bdb6b2009..e28acab3df 100644 --- a/http/handler_tx.go +++ b/http/handler_tx.go @@ -120,7 +120,7 @@ func (h *txHandler) bindRoutes(router *Router) { txnCreate.Tags = []string{"transaction"} txnCreate.AddParameter(txnReadOnlyQueryParam) txnCreate.AddResponse(200, txnCreateResponse) - txnCreate.Responses["400"] = errorResponse + txnCreate.Responses.Set("400", errorResponse) txnConcurrent := openapi3.NewOperation() txnConcurrent.OperationID = "new_concurrent_transaction" @@ -128,7 +128,7 @@ func (h *txHandler) bindRoutes(router *Router) { txnConcurrent.Tags = []string{"transaction"} txnConcurrent.AddParameter(txnReadOnlyQueryParam) txnConcurrent.AddResponse(200, txnCreateResponse) - txnConcurrent.Responses["400"] = errorResponse + txnConcurrent.Responses.Set("400", errorResponse) txnIdPathParam := openapi3.NewPathParameter("id"). WithRequired(true). 
@@ -139,18 +139,18 @@ func (h *txHandler) bindRoutes(router *Router) { txnCommit.Description = "Commit a transaction" txnCommit.Tags = []string{"transaction"} txnCommit.AddParameter(txnIdPathParam) - txnCommit.Responses = make(openapi3.Responses) - txnCommit.Responses["200"] = successResponse - txnCommit.Responses["400"] = errorResponse + txnCommit.Responses = openapi3.NewResponses() + txnCommit.Responses.Set("200", successResponse) + txnCommit.Responses.Set("400", errorResponse) txnDiscard := openapi3.NewOperation() txnDiscard.OperationID = "transaction_discard" txnDiscard.Description = "Discard a transaction" txnDiscard.Tags = []string{"transaction"} txnDiscard.AddParameter(txnIdPathParam) - txnDiscard.Responses = make(openapi3.Responses) - txnDiscard.Responses["200"] = successResponse - txnDiscard.Responses["400"] = errorResponse + txnDiscard.Responses = openapi3.NewResponses() + txnDiscard.Responses.Set("200", successResponse) + txnDiscard.Responses.Set("400", errorResponse) router.AddRoute("/tx", http.MethodPost, txnCreate, h.NewTxn) router.AddRoute("/tx/concurrent", http.MethodPost, txnConcurrent, h.NewConcurrentTxn) diff --git a/http/openapi.go b/http/openapi.go index 13e2b01f27..fc10881f5b 100644 --- a/http/openapi.go +++ b/http/openapi.go @@ -44,7 +44,7 @@ var openApiSchemas = map[string]any{ func NewOpenAPISpec() (*openapi3.T, error) { schemas := make(openapi3.Schemas) - responses := make(openapi3.Responses) + responses := make(openapi3.ResponseBodies) parameters := make(openapi3.ParametersMap) generator := openapi3gen.NewGenerator(openapi3gen.UseAllExportedFields()) @@ -91,7 +91,7 @@ func NewOpenAPISpec() (*openapi3.T, error) { Title: "DefraDB API", Version: "0", }, - Paths: make(openapi3.Paths), + Paths: openapi3.NewPaths(), Servers: openapi3.Servers{ &openapi3.Server{ Description: "Local DefraDB instance", From eea7699bd79780ee25499657f309e60e255046f1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 9 Jan 2024 12:02:58 -0800 Subject: [PATCH 42/60] bot: Bump follow-redirects from 1.15.3 to 1.15.4 in /playground (#2181) Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.3 to 1.15.4.
Commits
  • 6585820 Release version 1.15.4 of the npm package.
  • 7a6567e Disallow bracketed hostnames.
  • 05629af Prefer native URL instead of deprecated url.parse.
  • 1cba8e8 Prefer native URL instead of legacy url.resolve.
  • 72bc2a4 Simplify _processResponse error handling.
  • 3d42aec Add bracket tests.
  • bcbb096 Do not directly set Error properties.
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=follow-redirects&package-manager=npm_and_yarn&previous-version=1.15.3&new-version=1.15.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)

You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/sourcenetwork/defradb/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 105 +---------------------------------- 1 file changed, 3 insertions(+), 102 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 48478dacb6..aa74ece173 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -2502,23 +2502,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.16.0.tgz", - "integrity": "sha512-0N7Y9DSPdaBQ3sqSCwlrm9zJwkpOuc6HYm7LpzLAPqBL7dmzAUimr4M29dMkOP/tEwvOCC/Cxo//yOfJD3HUiw==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.16.0", - "@typescript-eslint/visitor-keys": "6.16.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/type-utils": { "version": "6.18.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.0.tgz", @@ -2628,71 +2611,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@typescript-eslint/types": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.16.0.tgz", - "integrity": "sha512-hvDFpLEvTJoHutVl87+MG/c5C8I6LOgEx05zExTSJDEVU7hhR3jhV8M5zuggbdFCw98+HhZWPHZeKS97kS3JoQ==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.16.0.tgz", - "integrity": "sha512-VTWZuixh/vr7nih6CfrdpmFNLEnoVBF1skfjdyGnNwXOH1SLeHItGdZDHhhAIzd3ACazyY2Fg76zuzOVTaknGA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.16.0", - "@typescript-eslint/visitor-keys": "6.16.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "minimatch": "9.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@typescript-eslint/utils": { "version": "6.18.0", "resolved": 
"https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.18.0.tgz", @@ -2817,23 +2735,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@typescript-eslint/visitor-keys": { - "version": "6.16.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.16.0.tgz", - "integrity": "sha512-QSFQLruk7fhs91a/Ep/LqRdbJCZ1Rq03rqBdKT5Ky17Sz8zRLUksqIe9DW0pKtg/Z35/ztbLQ6qpOCN6rOC11A==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.16.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@ungap/structured-clone": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", @@ -3792,9 +3693,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.3.tgz", - "integrity": "sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q==", + "version": "1.15.4", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", + "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", "funding": [ { "type": "individual", From ffd7f0b7bcd4743c667ce525767ef822e03782ec Mon Sep 17 00:00:00 2001 From: Islam Aliev Date: Wed, 10 Jan 2024 15:42:00 +0100 Subject: [PATCH 43/60] fix: Handle multiple nil values on unique indexed fields (#2178) ## Relevant issue(s) Resolves #2174 #2175 ## Description This change fixes how nil values are handled on unique indexes --- db/collection_index.go | 14 +-- db/errors.go | 4 +- db/index.go | 12 +- tests/integration/index/create_unique_test.go | 112 ++++++++++++++++++ ...uery_with_unique_index_only_filter_test.go | 38 ++++++ 5 files changed, 168 insertions(+), 12 deletions(-) diff --git a/db/collection_index.go b/db/collection_index.go index 531c839280..4367d8ebdf 100644 --- a/db/collection_index.go +++ b/db/collection_index.go @@ -13,6 +13,7 @@ package db import ( "context" "encoding/json" + "errors" "fmt" "strconv" "strings" @@ -236,23 +237,20 @@ func (c *collection) iterateAllDocs( df := c.newFetcher() err := df.Init(ctx, txn, c, fields, nil, nil, false, false) if err != nil { - _ = df.Close() - return err + return errors.Join(err, df.Close()) } start := base.MakeDataStoreKeyWithCollectionDescription(c.Description()) spans := core.NewSpans(core.NewSpan(start, start.PrefixEnd())) err = df.Start(ctx, spans) if err != nil { - _ = df.Close() - return err + return errors.Join(err, df.Close()) } for { encodedDoc, _, err := df.FetchNext(ctx) if err != nil { - _ = df.Close() - return err + return errors.Join(err, df.Close()) } if encodedDoc == nil { break @@ -260,12 +258,12 @@ func (c *collection) iterateAllDocs( doc, err := fetcher.Decode(encodedDoc, c.Schema()) if err != nil { - return err + return errors.Join(err, df.Close()) } err = exec(doc) if err != nil { - return err + return errors.Join(err, df.Close()) } } diff --git a/db/errors.go b/db/errors.go index 37695097b0..d8c9773926 100644 --- a/db/errors.go +++ b/db/errors.go @@ -86,7 +86,7 @@ const ( errExpectedJSONArray string = "expected JSON array" errOneOneAlreadyLinked string = "target document is already linked to another document" errIndexDoesNotMatchName string = "the index used does not match the given name" - 
errCanNotIndexNonUniqueField string = "can not create doc that violates unique index" + errCanNotIndexNonUniqueField string = "can not index a doc's field that violates unique index" errInvalidViewQuery string = "the query provided is not valid as a View" ) @@ -579,7 +579,7 @@ func NewErrInvalidViewQueryCastFailed(query string) error { return errors.New( errInvalidViewQuery, errors.NewKV("Query", query), - errors.NewKV("Reason", "Internal errror, cast failed"), + errors.NewKV("Reason", "Internal error, cast failed"), ) } diff --git a/db/index.go b/db/index.go index aa8a56bcd2..59fd25eaa9 100644 --- a/db/index.go +++ b/db/index.go @@ -290,10 +290,18 @@ func (i *collectionUniqueIndex) newUniqueIndexError( doc *client.Document, ) error { fieldVal, err := doc.GetValue(i.fieldDesc.Name) + var val any if err != nil { - return err + // If the error is ErrFieldNotExist, we leave `val` as is (e.g. nil) + // otherwise we return the error + if !errors.Is(err, client.ErrFieldNotExist) { + return err + } + } else { + val = fieldVal.Value() } - return NewErrCanNotIndexNonUniqueField(doc.ID().String(), i.fieldDesc.Name, fieldVal.Value()) + + return NewErrCanNotIndexNonUniqueField(doc.ID().String(), i.fieldDesc.Name, val) } func (i *collectionUniqueIndex) Update( diff --git a/tests/integration/index/create_unique_test.go b/tests/integration/index/create_unique_test.go index 69731e7b46..fac2330a28 100644 --- a/tests/integration/index/create_unique_test.go +++ b/tests/integration/index/create_unique_test.go @@ -186,3 +186,115 @@ func TestUniqueIndexCreate_IfFieldValuesAreUnique_Succeed(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestUniqueIndexCreate_IfNilFieldsArePresent_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Creating a unique index on a field with multiple nil values should return an error", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Andy" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Keenan" + }`, + }, + testUtils.CreateIndex{ + CollectionID: 0, + FieldName: "age", + Unique: true, + ExpectedError: db.NewErrCanNotIndexNonUniqueField("bae-caba9876-89aa-5bcf-bc1c-387a52499b27", "age", nil).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestUniqueIndexCreate_AddingDocWithNilValue_ShouldSucceed(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test adding a doc with nil value for indexed field should succeed", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John" + }`, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestUniqueIndexCreate_UponAddingDocWithExistingNilValue_ReturnError(t *testing.T) { + test := testUtils.TestCase{ + Description: "Adding a doc with a nil value on a unique indexed field that already holds a nil value should return an error", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "John", + "age": 21 + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Keenan" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Andy" + }`, + ExpectedError: 
db.NewErrCanNotIndexNonUniqueField("bae-2159860f-3cd1-59de-9440-71331e77cbb8", "age", nil).Error(), + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/index/query_with_unique_index_only_filter_test.go b/tests/integration/index/query_with_unique_index_only_filter_test.go index 11cf0be8a5..ad453409d4 100644 --- a/tests/integration/index/query_with_unique_index_only_filter_test.go +++ b/tests/integration/index/query_with_unique_index_only_filter_test.go @@ -494,3 +494,41 @@ func TestQueryWithUniqueIndex_IfNoMatch_ReturnEmptyResult(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestQueryWithUniqueIndex_WithEqualFilterOnNilValue_ShouldFetch(t *testing.T) { + test := testUtils.TestCase{ + Description: "Test index filtering with _eq filter on nil value", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int @index(unique: true) + }`, + }, + testUtils.CreatePredefinedDocs{ + Docs: getUserDocs(), + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: ` + { + "name": "Alice" + }`, + }, + testUtils.Request{ + Request: ` + query { + User(filter: {age: {_eq: null}}) { + name + } + }`, + Results: []map[string]any{ + {"name": "Alice"}, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From c217f3cca6327a588446094cdfcb47e8a5741b1b Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Wed, 10 Jan 2024 17:07:14 -0500 Subject: [PATCH 44/60] test(i): Skip tests in change detector without split (#2188) ## Relevant issue(s) Resolves #2187 ## Description Skip tests in change detector without split. Previously when there was no split (i.e. only setup actions were present) the last action was being run twice, once in setup, and once during the assertion stage - this went unnoticed until unique secondary indexes started to fail when they added the same doc twice. --- tests/integration/utils2.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 3e38dba6db..9589cc1a9e 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -186,7 +186,7 @@ func executeTestCase( logging.NewKV("changeDetector.Repository", changeDetector.Repository), ) - startActionIndex, endActionIndex := getActionRange(testCase) + startActionIndex, endActionIndex := getActionRange(t, testCase) s := newState(ctx, t, testCase, dbt, clientType, collectionNames) setStartingNodes(s) @@ -555,7 +555,7 @@ func flattenActions(testCase *TestCase) { // // If a SetupComplete action is provided, the actions will be split there, if not // they will be split at the first non SchemaUpdate/CreateDoc/UpdateDoc action. 
-func getActionRange(testCase TestCase) (int, int) { startIndex := 0 endIndex := len(testCase.Actions) - 1 @@ -598,8 +598,10 @@ ActionLoop: // We must not set this to -1 :) startIndex = firstNonSetupIndex } else { - // if we don't have any non-mutation actions, just use the last action - startIndex = endIndex + // if we don't have any non-mutation actions and the change detector is enabled + // skip this test as we will not gain anything from running (change detector would + // run an identical profile to a normal test run) + t.Skipf("no actions to execute") } } From 858e6a6298583801a0ac0a3f07cc619100872108 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Wed, 10 Jan 2024 17:20:25 -0500 Subject: [PATCH 45/60] fix(i): Retain embedded schema within gql types on reload (#2185) ## Relevant issue(s) Resolves #2184 #2182 ## Description Retain embedded schema within gql types on reload and further GQL modification. Also refactors the code a little to make it harder to re-introduce the bug (calling `loadSchema` instead of sometimes relying on function vars). --- db/collection.go | 53 ++++++++--- db/description/schema.go | 42 +++++++++ db/schema.go | 29 ++---- db/view.go | 20 +--- tests/integration/schema/simple_test.go | 4 +- .../view/one_to_one/identical_schema_test.go | 54 ++++++++++- .../view/one_to_one/simple_test_test.go | 94 +++++++++++++++++++ .../view/one_to_one/with_restart_test.go | 88 +++++++++++++++++ 8 files changed, 333 insertions(+), 51 deletions(-) create mode 100644 tests/integration/view/one_to_one/simple_test_test.go create mode 100644 tests/integration/view/one_to_one/with_restart_test.go diff --git a/db/collection.go b/db/collection.go index 352d3fc8a1..f066c1d9fe 100644 --- a/db/collection.go +++ b/db/collection.go @@ -483,17 +483,7 @@ func (db *db) setDefaultSchemaVersion( } } - cols, err := db.getAllCollections(ctx, txn) - if err != nil { - return err - } - - definitions := make([]client.CollectionDefinition, len(cols)) - for i, col := range cols { - definitions[i] = col.Definition() - } - - return db.parser.SetSchema(ctx, txn, definitions) + return db.loadSchema(ctx, txn) } func (db *db) setDefaultSchemaVersionExplicit( @@ -633,6 +623,47 @@ func (db *db) getAllCollections(ctx context.Context, txn datastore.Txn) ([]clien return collections, nil } +// getAllActiveDefinitions returns all queryable collection/views and any embedded schema used by them. +func (db *db) getAllActiveDefinitions(ctx context.Context, txn datastore.Txn) ([]client.CollectionDefinition, error) { + cols, err := description.GetCollections(ctx, txn) + if err != nil { + return nil, err + } + + definitions := make([]client.CollectionDefinition, len(cols)) + for i, col := range cols { + schema, err := description.GetSchemaVersion(ctx, txn, col.SchemaVersionID) + if err != nil { + return nil, err + } + + collection := db.newCollection(col, schema) + + err = collection.loadIndexes(ctx, txn) + if err != nil { + return nil, err + } + + definitions[i] = collection.Definition() + } + + schemas, err := description.GetCollectionlessSchemas(ctx, txn) + if err != nil { + return nil, err + } + + for _, schema := range schemas { + definitions = append( + definitions, + client.CollectionDefinition{ + Schema: schema, + }, + ) + } + + return definitions, nil +} + // GetAllDocIDs returns all the document IDs that exist in the collection. 
// // @todo: We probably need a lock on the collection for this kind of op since diff --git a/db/description/schema.go b/db/description/schema.go index 06b129f3df..c486ee1a59 100644 --- a/db/description/schema.go +++ b/db/description/schema.go @@ -283,3 +283,45 @@ func GetSchemaVersionIDs( return schemaVersions, nil } + +// GetCollectionlessSchemas returns all schema that are not attached to a collection. +// +// Typically this means any schema embedded in a View. +// +// WARNING: This function does not currently account for multiple versions of collectionless schema, +// at the moment such a situation is impossible, but that is likely to change, at which point this +// function will need to account for that. +func GetCollectionlessSchemas( + ctx context.Context, + txn datastore.Txn, +) ([]client.SchemaDescription, error) { + cols, err := GetCollections(ctx, txn) + if err != nil { + return nil, err + } + + allSchemas, err := GetAllSchemas(ctx, txn) + if err != nil { + return nil, err + } + + schemaRootsByVersionID := map[string]string{} + for _, schema := range allSchemas { + schemaRootsByVersionID[schema.VersionID] = schema.Root + } + + colSchemaRoots := map[string]struct{}{} + for _, col := range cols { + schemaRoot := schemaRootsByVersionID[col.SchemaVersionID] + colSchemaRoots[schemaRoot] = struct{}{} + } + + collectionlessSchema := []client.SchemaDescription{} + for _, schema := range allSchemas { + if _, hasCollection := colSchemaRoots[schema.Root]; !hasCollection { + collectionlessSchema = append(collectionlessSchema, schema) + } + } + + return collectionlessSchema, nil +} diff --git a/db/schema.go b/db/schema.go index df95df60e2..988aea5e17 100644 --- a/db/schema.go +++ b/db/schema.go @@ -54,11 +54,6 @@ func (db *db) addSchema( return nil, err } - err = db.parser.SetSchema(ctx, txn, append(existingDefinitions, newDefinitions...)) - if err != nil { - return nil, err - } - returnDescriptions := make([]client.CollectionDescription, len(newDefinitions)) for i, definition := range newDefinitions { col, err := db.createCollection(ctx, txn, definition) @@ -68,20 +63,20 @@ func (db *db) addSchema( returnDescriptions[i] = col.Description() } + err = db.loadSchema(ctx, txn) + if err != nil { + return nil, err + } + return returnDescriptions, nil } func (db *db) loadSchema(ctx context.Context, txn datastore.Txn) error { - collections, err := db.getAllCollections(ctx, txn) + definitions, err := db.getAllActiveDefinitions(ctx, txn) if err != nil { return err } - definitions := make([]client.CollectionDefinition, len(collections)) - for i := range collections { - definitions[i] = collections[i].Definition() - } - return db.parser.SetSchema(ctx, txn, definitions) } @@ -150,17 +145,7 @@ func (db *db) patchSchema(ctx context.Context, txn datastore.Txn, patchString st } } - newCollections, err := db.getAllCollections(ctx, txn) - if err != nil { - return err - } - - definitions := make([]client.CollectionDefinition, len(newCollections)) - for i, col := range newCollections { - definitions[i] = col.Definition() - } - - return db.parser.SetSchema(ctx, txn, definitions) + return db.loadSchema(ctx, txn) } // substituteSchemaPatch handles any substitution of values that may be required before diff --git a/db/view.go b/db/view.go index dc04c83303..2b4666df22 100644 --- a/db/view.go +++ b/db/view.go @@ -60,21 +60,6 @@ func (db *db) addView( newDefinitions[i].Description.BaseQuery = baseQuery } - existingCollections, err := db.getAllCollections(ctx, txn) - if err != nil { - return nil, err - } - - 
existingDefinitions := make([]client.CollectionDefinition, len(existingCollections)) - for i := range existingCollections { - existingDefinitions[i] = existingCollections[i].Definition() - } - - err = db.parser.SetSchema(ctx, txn, append(existingDefinitions, newDefinitions...)) - if err != nil { - return nil, err - } - returnDescriptions := make([]client.CollectionDefinition, len(newDefinitions)) for i, definition := range newDefinitions { if definition.Description.Name == "" { @@ -95,5 +80,10 @@ func (db *db) addView( } } + err = db.loadSchema(ctx, txn) + if err != nil { + return nil, err + } + return returnDescriptions, nil } diff --git a/tests/integration/schema/simple_test.go b/tests/integration/schema/simple_test.go index ed8e05abf7..dccec9c4dd 100644 --- a/tests/integration/schema/simple_test.go +++ b/tests/integration/schema/simple_test.go @@ -78,7 +78,7 @@ func TestSchemaSimpleErrorsGivenDuplicateSchema(t *testing.T) { Schema: ` type Users {} `, - ExpectedError: "schema type already exists", + ExpectedError: "collection already exists", }, }, } @@ -94,7 +94,7 @@ func TestSchemaSimpleErrorsGivenDuplicateSchemaInSameSDL(t *testing.T) { type Users {} type Users {} `, - ExpectedError: "schema type already exists", + ExpectedError: "collection already exists", }, }, } diff --git a/tests/integration/view/one_to_one/identical_schema_test.go b/tests/integration/view/one_to_one/identical_schema_test.go index fb82303134..90248ede17 100644 --- a/tests/integration/view/one_to_one/identical_schema_test.go +++ b/tests/integration/view/one_to_one/identical_schema_test.go @@ -8,7 +8,7 @@ // by the Apache License, Version 2.0, included in the file // licenses/APL.txt. -package one_to_many +package one_to_one import ( "testing" @@ -92,3 +92,55 @@ func TestView_OneToOneSameSchema(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_OneToOneEmbeddedSchemaIsNotLostOnNextUpdate(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to one view followed by GQL type update", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + // After creating the view, update the system's types again and ensure + // that `BookView` is not forgotten. A GQL error would appear if this + // was broken as `AuthorView.books` would reference a type that does + // not exist. + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_one/simple_test_test.go b/tests/integration/view/one_to_one/simple_test_test.go new file mode 100644 index 0000000000..96967acf07 --- /dev/null +++ b/tests/integration/view/one_to_one/simple_test_test.go @@ -0,0 +1,94 @@ +// Copyright 2024 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. 
+
+package one_to_one
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestView_OneToOneDuplicateEmbeddedSchema_Errors(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "One to one view and duplicate embedded schema",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Author {
+						name: String
+						books: [Book]
+					}
+					type Book {
+						name: String
+						author: Author
+					}
+				`,
+			},
+			testUtils.CreateView{
+				Query: `
+					Author {
+						name
+						books {
+							name
+						}
+					}
+				`,
+				SDL: `
+					type AuthorView {
+						name: String
+						books: [BookView]
+					}
+					interface BookView {
+						name: String
+					}
+				`,
+			},
+			// Try to create a second view that creates a new `BookView`; this
+			// should error as `BookView` has already been created by the first view.
+			testUtils.CreateView{
+				Query: `
+					Author {
+						authorName: name
+						books {
+							bookName: name
+						}
+					}
+				`,
+				SDL: `
+					type AuthorAliasView {
+						authorName: String
+						books: [BookView]
+					}
+					interface BookView {
+						bookName: String
+					}
+				`,
+				ExpectedError: "schema type already exists. Name: BookView",
+			},
+			testUtils.IntrospectionRequest{
+				Request: `
+					query {
+						__type (name: "BookView") {
+							name
+						}
+					}
+				`,
+				ExpectedData: map[string]any{
+					"__type": map[string]any{
+						"name": "BookView",
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}
diff --git a/tests/integration/view/one_to_one/with_restart_test.go b/tests/integration/view/one_to_one/with_restart_test.go
new file mode 100644
index 0000000000..a17886867f
--- /dev/null
+++ b/tests/integration/view/one_to_one/with_restart_test.go
@@ -0,0 +1,88 @@
+// Copyright 2024 Democratized Data Foundation
+//
+// Use of this software is governed by the Business Source License
+// included in the file licenses/BSL.txt.
+//
+// As of the Change Date specified in that file, in accordance with
+// the Business Source License, use of this software will be governed
+// by the Apache License, Version 2.0, included in the file
+// licenses/APL.txt.
+
+package one_to_one
+
+import (
+	"testing"
+
+	testUtils "github.com/sourcenetwork/defradb/tests/integration"
+)
+
+func TestView_OneToOneEmbeddedSchemaIsNotLostOnRestart(t *testing.T) {
+	test := testUtils.TestCase{
+		Description: "One to one view and restart",
+		Actions: []any{
+			testUtils.SchemaUpdate{
+				Schema: `
+					type Author {
+						name: String
+						books: [Book]
+					}
+					type Book {
+						name: String
+						author: Author
+					}
+				`,
+			},
+			testUtils.CreateView{
+				Query: `
+					Author {
+						name
+						books {
+							name
+						}
+					}
+				`,
+				SDL: `
+					type AuthorView {
+						name: String
+						books: [BookView]
+					}
+					interface BookView {
+						name: String
+					}
+				`,
+			},
+			// After creating the view, restart and ensure that `BookView` is not forgotten.
+			testUtils.Restart{},
+			testUtils.IntrospectionRequest{
+				Request: `
+					query {
+						__type (name: "AuthorView") {
+							name
+						}
+					}
+				`,
+				ExpectedData: map[string]any{
+					"__type": map[string]any{
+						"name": "AuthorView",
+					},
+				},
+			},
+			testUtils.IntrospectionRequest{
+				Request: `
+					query {
+						__type (name: "BookView") {
+							name
+						}
+					}
+				`,
+				ExpectedData: map[string]any{
+					"__type": map[string]any{
+						"name": "BookView",
+					},
+				},
+			},
+		},
+	}
+
+	testUtils.ExecuteTestCase(t, test)
+}

From 72732f3e4debbd736b4975d66353a47a18e30a6e Mon Sep 17 00:00:00 2001
From: Keenan Nemetz
Date: Wed, 10 Jan 2024 16:36:50 -0800
Subject: [PATCH 46/60] feat: Mutation typed input (#2167)

## Relevant issue(s)

Resolves #2143

## Description

This PR adds a typed input object for create and update mutations.
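For example (both forms below are taken verbatim from the integration tests updated in this PR), a create mutation that previously passed the document as an escaped JSON string:

`create_User(data: "{\"name\": \"John\",\"age\": 27}")`

now takes a typed object via the `input` argument:

```graphql
mutation {
  create_User(input: {name: "John", age: 27}) {
    _docID
  }
}
```

Update mutations follow the same pattern, e.g. `update_User(filter: {name: {_eq: "John"}}, input: {points: 45})`.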
~~As a side effect of this change the relationship alias has been replaced with the `_id` input field.~~ Relational sub-documents cannot be created from mutation input in this implementation. Related SIP https://github.com/sourcenetwork/SIPs/discussions/10 ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? Make test Specify the platform(s) on which this was tested: - MacOS --- client/document.go | 4 + client/request/consts.go | 2 +- client/request/mutation.go | 2 +- planner/create.go | 28 ++-- planner/explain.go | 2 +- planner/mapper/mapper.go | 2 +- planner/mapper/mutation.go | 7 +- planner/update.go | 18 +-- request/graphql/parser/mutation.go | 50 ++++++- request/graphql/schema/descriptions.go | 7 - request/graphql/schema/errors.go | 61 ++++---- request/graphql/schema/generate.go | 130 ++++++++++++------ .../events/simple/with_create_txn_test.go | 4 +- .../integration/explain/debug/create_test.go | 4 +- .../integration/explain/debug/update_test.go | 8 +- .../explain/default/create_test.go | 12 +- .../explain/default/update_test.go | 24 ++-- .../explain/execute/create_test.go | 2 +- .../explain/execute/update_test.go | 4 +- tests/integration/gql.go | 68 +++++++++ .../create/field_kinds/one_to_many/utils.go | 3 +- .../one_to_many/with_alias_test.go | 14 ++ .../one_to_many/with_simple_test.go | 14 ++ .../create/field_kinds/one_to_one/utils.go | 3 +- .../field_kinds/one_to_one/with_alias_test.go | 8 ++ .../one_to_one/with_simple_test.go | 8 ++ .../one_to_one_to_one/with_txn_test.go | 8 +- .../mutation/create/simple_test.go | 18 ++- .../mutation/create/with_version_test.go | 2 +- .../integration/mutation/mix/with_txn_test.go | 12 +- .../special/invalid_operation_test.go | 2 +- .../mutation/update/crdt/pncounter_test.go | 8 ++ .../update/field_kinds/date_time_test.go | 2 +- .../field_kinds/one_to_many/simple_test.go | 14 ++ .../one_to_many/with_alias_test.go | 6 + .../field_kinds/one_to_one/with_alias_test.go | 2 +- .../one_to_one/with_simple_test.go | 6 +- .../mutation/update/with_filter_test.go | 4 +- .../mutation/update/with_id_test.go | 4 +- .../mutation/update/with_ids_test.go | 2 +- .../query/one_to_many/with_id_field_test.go | 8 ++ .../migrations/query/with_update_test.go | 2 +- .../field/kind/foreign_object_array_test.go | 6 +- .../add/field/kind/foreign_object_test.go | 4 +- .../subscription/subscription_test.go | 16 +-- tests/integration/utils2.go | 13 +- 46 files changed, 429 insertions(+), 199 deletions(-) create mode 100644 tests/integration/gql.go diff --git a/client/document.go b/client/document.go index 8fb5e28868..93e06df27e 100644 --- a/client/document.go +++ b/client/document.go @@ -251,6 +251,8 @@ func getFloat64(v any) (float64, error) { return val.Float64() case int: return float64(val), nil + case int32: + return float64(val), nil case int64: return float64(val), nil case float64: @@ -266,6 +268,8 @@ func getInt64(v any) (int64, error) { return val.Int64() case int: return int64(val), nil + case int32: + return int64(val), nil case int64: return val, 
nil case float64: diff --git a/client/request/consts.go b/client/request/consts.go index 85b7d63d84..1a1d653a25 100644 --- a/client/request/consts.go +++ b/client/request/consts.go @@ -20,7 +20,7 @@ const ( RelatedObjectID = "_id" Cid = "cid" - Data = "data" + Input = "input" FieldName = "field" FieldIDName = "fieldId" ShowDeleted = "showDeleted" diff --git a/client/request/mutation.go b/client/request/mutation.go index 3d19210458..6bff180dd9 100644 --- a/client/request/mutation.go +++ b/client/request/mutation.go @@ -33,7 +33,7 @@ type ObjectMutation struct { IDs immutable.Option[[]string] Filter immutable.Option[Filter] - Data string + Input map[string]any Fields []Selection } diff --git a/planner/create.go b/planner/create.go index e272c80722..a03c429da9 100644 --- a/planner/create.go +++ b/planner/create.go @@ -11,8 +11,6 @@ package planner import ( - "encoding/json" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" "github.com/sourcenetwork/defradb/core" @@ -37,9 +35,9 @@ type createNode struct { // collection name, meta-data, etc. collection client.Collection - // newDoc is the JSON string of the new document, unparsed - newDocStr string - doc *client.Document + // input map of fields and values + input map[string]any + doc *client.Document err error @@ -59,7 +57,7 @@ func (n *createNode) Kind() string { return "createNode" } func (n *createNode) Init() error { return nil } func (n *createNode) Start() error { - doc, err := client.NewDocFromJSON([]byte(n.newDocStr), n.collection.Schema()) + doc, err := client.NewDocFromMap(n.input, n.collection.Schema()) if err != nil { n.err = err return err @@ -135,24 +133,14 @@ func (n *createNode) Close() error { func (n *createNode) Source() planNode { return n.results } -func (n *createNode) simpleExplain() (map[string]any, error) { - data := map[string]any{} - err := json.Unmarshal([]byte(n.newDocStr), &data) - if err != nil { - return nil, err - } - - return map[string]any{ - dataLabel: data, - }, nil -} - // Explain method returns a map containing all attributes of this node that // are to be explained, subscribes / opts-in this node to be an explainablePlanNode. func (n *createNode) Explain(explainType request.ExplainType) (map[string]any, error) { switch explainType { case request.SimpleExplain: - return n.simpleExplain() + return map[string]any{ + inputLabel: n.input, + }, nil case request.ExecuteExplain: return map[string]any{ @@ -173,7 +161,7 @@ func (p *Planner) CreateDoc(parsed *mapper.Mutation) (planNode, error) { // create a mutation createNode. 
create := &createNode{ p: p, - newDocStr: parsed.Data, + input: parsed.Input, results: results, docMapper: docMapper{parsed.DocumentMapping}, } diff --git a/planner/explain.go b/planner/explain.go index 76e562dc94..5ab2f292f8 100644 --- a/planner/explain.go +++ b/planner/explain.go @@ -53,7 +53,7 @@ const ( childFieldNameLabel = "childFieldName" collectionIDLabel = "collectionID" collectionNameLabel = "collectionName" - dataLabel = "data" + inputLabel = "input" fieldNameLabel = "fieldName" filterLabel = "filter" joinRootLabel = "root" diff --git a/planner/mapper/mapper.go b/planner/mapper/mapper.go index ff7e19ff21..06772be487 100644 --- a/planner/mapper/mapper.go +++ b/planner/mapper/mapper.go @@ -1089,7 +1089,7 @@ func ToMutation(ctx context.Context, store client.Store, mutationRequest *reques return &Mutation{ Select: *underlyingSelect, Type: MutationType(mutationRequest.Type), - Data: mutationRequest.Data, + Input: mutationRequest.Input, }, nil } diff --git a/planner/mapper/mutation.go b/planner/mapper/mutation.go index c3c5829294..a38444e01c 100644 --- a/planner/mapper/mutation.go +++ b/planner/mapper/mutation.go @@ -27,9 +27,8 @@ type Mutation struct { // The type of mutation. For example a create request. Type MutationType - // The data to be used for the mutation. For example, during a create this - // will be the json representation of the object to be inserted. - Data string + // Input is the map of fields and values used for the mutation. + Input map[string]any } func (m *Mutation) CloneTo(index int) Requestable { @@ -40,6 +39,6 @@ func (m *Mutation) cloneTo(index int) *Mutation { return &Mutation{ Select: *m.Select.cloneTo(index), Type: m.Type, - Data: m.Data, + Input: m.Input, } } diff --git a/planner/update.go b/planner/update.go index 78619bd55f..077ceb39e4 100644 --- a/planner/update.go +++ b/planner/update.go @@ -31,7 +31,8 @@ type updateNode struct { docIDs []string - patch string + // input map of fields and values + input map[string]any isUpdating bool @@ -67,7 +68,11 @@ func (n *updateNode) Next() (bool, error) { if err != nil { return false, err } - _, err = n.collection.UpdateWithDocID(n.p.ctx, docID, n.patch) + patch, err := json.Marshal(n.input) + if err != nil { + return false, err + } + _, err = n.collection.UpdateWithDocID(n.p.ctx, docID, string(patch)) if err != nil { return false, err } @@ -126,12 +131,7 @@ func (n *updateNode) simpleExplain() (map[string]any, error) { } // Add the attribute that represents the patch to update with. 
- data := map[string]any{} - err := json.Unmarshal([]byte(n.patch), &data) - if err != nil { - return nil, err - } - simpleExplainMap[dataLabel] = data + simpleExplainMap[inputLabel] = n.input return simpleExplainMap, nil } @@ -160,7 +160,7 @@ func (p *Planner) UpdateDocs(parsed *mapper.Mutation) (planNode, error) { filter: parsed.Filter, docIDs: parsed.DocIDs.Value(), isUpdating: true, - patch: parsed.Data, + input: parsed.Input, docMapper: docMapper{parsed.DocumentMapping}, } diff --git a/request/graphql/parser/mutation.go b/request/graphql/parser/mutation.go index 0802c745d6..27becabb71 100644 --- a/request/graphql/parser/mutation.go +++ b/request/graphql/parser/mutation.go @@ -99,12 +99,9 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re for _, argument := range field.Arguments { prop := argument.Name.Value // parse each individual arg type seperately - if prop == request.Data { // parse data - raw := argument.Value.(*ast.StringValue) - if raw.Value == "" { - return nil, ErrEmptyDataPayload - } - mut.Data = raw.Value + if prop == request.Input { // parse input + raw := argument.Value.(*ast.ObjectValue) + mut.Input = parseMutationInputObject(raw) } else if prop == request.FilterClause { // parse filter obj := argument.Value.(*ast.ObjectValue) filterType, ok := getArgumentType(fieldDef, request.FilterClause) @@ -147,3 +144,44 @@ func parseMutation(schema gql.Schema, parent *gql.Object, field *ast.Field) (*re mut.Fields, err = parseSelectFields(schema, request.ObjectSelection, fieldObject, field.SelectionSet) return mut, err } + +// parseMutationInput parses the correct underlying +// value type of the given ast.Value +func parseMutationInput(val ast.Value) any { + switch t := val.(type) { + case *ast.IntValue: + return gql.Int.ParseLiteral(val) + case *ast.FloatValue: + return gql.Float.ParseLiteral(val) + case *ast.BooleanValue: + return t.Value + case *ast.StringValue: + return t.Value + case *ast.ObjectValue: + return parseMutationInputObject(t) + case *ast.ListValue: + return parseMutationInputList(t) + default: + return val.GetValue() + } +} + +// parseMutationInputList parses the correct underlying +// value type for all of the values in the ast.ListValue +func parseMutationInputList(val *ast.ListValue) []any { + list := make([]any, 0) + for _, val := range val.Values { + list = append(list, parseMutationInput(val)) + } + return list +} + +// parseMutationInputObject parses the correct underlying +// value type for all of the fields in the ast.ObjectValue +func parseMutationInputObject(val *ast.ObjectValue) map[string]any { + obj := make(map[string]any) + for _, field := range val.Fields { + obj[field.Name.Value] = parseMutationInput(field.Value) + } + return obj +} diff --git a/request/graphql/schema/descriptions.go b/request/graphql/schema/descriptions.go index 5ab76ff726..147c494c74 100644 --- a/request/graphql/schema/descriptions.go +++ b/request/graphql/schema/descriptions.go @@ -125,9 +125,6 @@ An optional value that specifies as to whether deleted documents may be ` createDocumentDescription string = ` Creates a single document of this type using the data provided. -` - createDataArgDescription string = ` -The json representation of the document you wish to create. Required. ` updateDocumentsDescription string = ` Updates documents in this collection using the data provided. 
Only documents @@ -148,10 +145,6 @@ An optional set of docID values that will limit the update to documents An optional filter for this update that will limit the update to the documents matching the given criteria. If no matching documents are found, the operation will succeed, but no documents will be updated. -` - updateDataArgDescription string = ` -The json representation of the fields to update and their new values. Required. - Fields not explicitly mentioned here will not be updated. ` deleteDocumentsDescription string = ` Deletes documents in this collection matching any provided criteria. If no diff --git a/request/graphql/schema/errors.go b/request/graphql/schema/errors.go index cf28c7d710..39bbbd803a 100644 --- a/request/graphql/schema/errors.go +++ b/request/graphql/schema/errors.go @@ -13,35 +13,37 @@ package schema import "github.com/sourcenetwork/defradb/errors" const ( - errDuplicateField string = "duplicate field" - errFieldMissingRelation string = "field missing associated relation" - errRelationMissingField string = "relation missing field" - errAggregateTargetNotFound string = "aggregate target not found" - errSchemaTypeAlreadyExist string = "schema type already exists" - errObjectNotFoundDuringThunk string = "object not found whilst executing fields thunk" - errTypeNotFound string = "no type found for given name" - errRelationNotFound string = "no relation found" - errNonNullForTypeNotSupported string = "NonNull variants for type are not supported" - errIndexMissingFields string = "index missing fields" - errIndexUnknownArgument string = "index with unknown argument" - errIndexInvalidArgument string = "index with invalid argument" - errIndexInvalidName string = "index with invalid name" + errDuplicateField string = "duplicate field" + errFieldMissingRelation string = "field missing associated relation" + errRelationMissingField string = "relation missing field" + errAggregateTargetNotFound string = "aggregate target not found" + errSchemaTypeAlreadyExist string = "schema type already exists" + errMutationInputTypeAlreadyExist string = "mutation input type already exists" + errObjectNotFoundDuringThunk string = "object not found whilst executing fields thunk" + errTypeNotFound string = "no type found for given name" + errRelationNotFound string = "no relation found" + errNonNullForTypeNotSupported string = "NonNull variants for type are not supported" + errIndexMissingFields string = "index missing fields" + errIndexUnknownArgument string = "index with unknown argument" + errIndexInvalidArgument string = "index with invalid argument" + errIndexInvalidName string = "index with invalid name" ) var ( - ErrDuplicateField = errors.New(errDuplicateField) - ErrFieldMissingRelation = errors.New(errFieldMissingRelation) - ErrRelationMissingField = errors.New(errRelationMissingField) - ErrAggregateTargetNotFound = errors.New(errAggregateTargetNotFound) - ErrSchemaTypeAlreadyExist = errors.New(errSchemaTypeAlreadyExist) - ErrObjectNotFoundDuringThunk = errors.New(errObjectNotFoundDuringThunk) - ErrTypeNotFound = errors.New(errTypeNotFound) - ErrRelationNotFound = errors.New(errRelationNotFound) - ErrNonNullForTypeNotSupported = errors.New(errNonNullForTypeNotSupported) - ErrRelationMutlipleTypes = errors.New("relation type can only be either One or Many, not both") - ErrRelationMissingTypes = errors.New("relation is missing its defined types and fields") - ErrRelationInvalidType = errors.New("relation has an invalid type to be finalize") - ErrMultipleRelationPrimaries = 
errors.New("relation can only have a single field set as primary") + ErrDuplicateField = errors.New(errDuplicateField) + ErrFieldMissingRelation = errors.New(errFieldMissingRelation) + ErrRelationMissingField = errors.New(errRelationMissingField) + ErrAggregateTargetNotFound = errors.New(errAggregateTargetNotFound) + ErrSchemaTypeAlreadyExist = errors.New(errSchemaTypeAlreadyExist) + ErrMutationInputTypeAlreadyExist = errors.New(errMutationInputTypeAlreadyExist) + ErrObjectNotFoundDuringThunk = errors.New(errObjectNotFoundDuringThunk) + ErrTypeNotFound = errors.New(errTypeNotFound) + ErrRelationNotFound = errors.New(errRelationNotFound) + ErrNonNullForTypeNotSupported = errors.New(errNonNullForTypeNotSupported) + ErrRelationMutlipleTypes = errors.New("relation type can only be either One or Many, not both") + ErrRelationMissingTypes = errors.New("relation is missing its defined types and fields") + ErrRelationInvalidType = errors.New("relation has an invalid type to be finalize") + ErrMultipleRelationPrimaries = errors.New("relation can only have a single field set as primary") // NonNull is the literal name of the GQL type, so we have to disable the linter //nolint:revive ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") @@ -94,6 +96,13 @@ func NewErrSchemaTypeAlreadyExist(name string) error { ) } +func NewErrMutationInputTypeAlreadyExist(name string) error { + return errors.New( + errMutationInputTypeAlreadyExist, + errors.NewKV("Name", name), + ) +} + func NewErrObjectNotFoundDuringThunk(object string) error { return errors.New( errObjectNotFoundDuringThunk, diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index 556700cd7f..9e1d67a9c8 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -13,6 +13,7 @@ package schema import ( "context" "fmt" + "strings" gql "github.com/sourcenetwork/graphql-go" @@ -85,6 +86,11 @@ func (g *Generator) generate(ctx context.Context, collections []client.Collectio if err != nil { return nil, err } + // build mutation input types + err = g.buildMutationInputTypes(collections) + if err != nil { + return nil, err + } // resolve types if err := g.manager.ResolveTypes(); err != nil { return nil, err @@ -407,6 +413,7 @@ func (g *Generator) buildTypes( for _, c := range collections { // Copy the loop variable before usage within the loop or it // will be reassigned before the thunk is run + // TODO remove when Go 1.22 collection := c fieldDescriptions := collection.Schema.Fields isEmbeddedObject := collection.Description.Name == "" @@ -517,6 +524,67 @@ func (g *Generator) buildTypes( return objs, nil } +// buildMutationInputTypes creates the input object types +// for collection create and update mutation operations. 
+func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefinition) error { + for _, c := range collections { + // Copy the loop variable before usage within the loop or it + // will be reassigned before the thunk is run + // TODO remove when Go 1.22 + collection := c + fieldDescriptions := collection.Schema.Fields + mutationInputName := collection.Description.Name + "MutationInputArg" + + // check if mutation input type exists + if _, ok := g.manager.schema.TypeMap()[mutationInputName]; ok { + return NewErrMutationInputTypeAlreadyExist(mutationInputName) + } + + mutationObjConf := gql.InputObjectConfig{ + Name: mutationInputName, + } + + // Wrap mutation input object definition in a thunk so we can + // handle any embedded object which is defined + // at a future point in time. + mutationObjConf.Fields = (gql.InputObjectConfigFieldMapThunk)(func() (gql.InputObjectConfigFieldMap, error) { + fields := make(gql.InputObjectConfigFieldMap) + + for _, field := range fieldDescriptions { + if strings.HasPrefix(field.Name, "_") { + // ignore system defined args as the + // user cannot override their values + continue + } + + var ttype gql.Type + if field.Kind == client.FieldKind_FOREIGN_OBJECT { + ttype = gql.ID + } else if field.Kind == client.FieldKind_FOREIGN_OBJECT_ARRAY { + ttype = gql.NewList(gql.ID) + } else { + var ok bool + ttype, ok = fieldKindToGQLType[field.Kind] + if !ok { + return nil, NewErrTypeNotFound(fmt.Sprint(field.Kind)) + } + } + + fields[field.Name] = &gql.InputObjectFieldConfig{ + Type: ttype, + } + } + + return fields, nil + }) + + mutationObj := gql.NewInputObject(mutationObjConf) + g.manager.schema.TypeMap()[mutationObj.Name()] = mutationObj + } + + return nil +} + func (g *Generator) genAggregateFields(ctx context.Context) error { topLevelCountInputs := map[string]*gql.InputObject{} topLevelNumericAggInputs := map[string]*gql.InputObject{} @@ -950,79 +1018,51 @@ func (g *Generator) GenerateMutationInputForGQLType(obj *gql.Object) ([]*gql.Fie return nil, obj.Error() } - typeName := obj.Name() - filter, ok := g.manager.schema.TypeMap()[typeName+"FilterArg"].(*gql.InputObject) - if !ok { - return nil, NewErrTypeNotFound(typeName + "FilterArg") - } - - return g.genTypeMutationFields(obj, filter) -} + filterInputName := genTypeName(obj, "FilterArg") + mutationInputName := genTypeName(obj, "MutationInputArg") -func (g *Generator) genTypeMutationFields( - obj *gql.Object, - filterInput *gql.InputObject, -) ([]*gql.Field, error) { - create, err := g.genTypeMutationCreateField(obj) - if err != nil { - return nil, err - } - update, err := g.genTypeMutationUpdateField(obj, filterInput) - if err != nil { - return nil, err + filterInput, ok := g.manager.schema.TypeMap()[filterInputName].(*gql.InputObject) + if !ok { + return nil, NewErrTypeNotFound(filterInputName) } - delete, err := g.genTypeMutationDeleteField(obj, filterInput) - if err != nil { - return nil, err + mutationInput, ok := g.manager.schema.TypeMap()[mutationInputName] + if !ok { + return nil, NewErrTypeNotFound(mutationInputName) } - return []*gql.Field{create, update, delete}, nil -} -func (g *Generator) genTypeMutationCreateField(obj *gql.Object) (*gql.Field, error) { - field := &gql.Field{ + create := &gql.Field{ Name: "create_" + obj.Name(), Description: createDocumentDescription, Type: obj, Args: gql.FieldConfigArgument{ - "data": schemaTypes.NewArgConfig(gql.String, createDataArgDescription), + "input": schemaTypes.NewArgConfig(mutationInput, "Create field values"), }, } - return field, nil 
-} -func (g *Generator) genTypeMutationUpdateField( - obj *gql.Object, - filter *gql.InputObject, -) (*gql.Field, error) { - field := &gql.Field{ + update := &gql.Field{ Name: "update_" + obj.Name(), Description: updateDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ request.DocIDArgName: schemaTypes.NewArgConfig(gql.ID, updateIDArgDescription), request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), updateIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filter, updateFilterArgDescription), - "data": schemaTypes.NewArgConfig(gql.String, updateDataArgDescription), + "filter": schemaTypes.NewArgConfig(filterInput, updateFilterArgDescription), + "input": schemaTypes.NewArgConfig(mutationInput, "Update field values"), }, } - return field, nil -} -func (g *Generator) genTypeMutationDeleteField( - obj *gql.Object, - filter *gql.InputObject, -) (*gql.Field, error) { - field := &gql.Field{ + delete := &gql.Field{ Name: "delete_" + obj.Name(), Description: deleteDocumentsDescription, Type: gql.NewList(obj), Args: gql.FieldConfigArgument{ request.DocIDArgName: schemaTypes.NewArgConfig(gql.ID, deleteIDArgDescription), request.DocIDsArgName: schemaTypes.NewArgConfig(gql.NewList(gql.ID), deleteIDsArgDescription), - "filter": schemaTypes.NewArgConfig(filter, deleteFilterArgDescription), + "filter": schemaTypes.NewArgConfig(filterInput, deleteFilterArgDescription), }, } - return field, nil + + return []*gql.Field{create, update, delete}, nil } func (g *Generator) genTypeFieldsEnum(obj *gql.Object) *gql.Enum { diff --git a/tests/integration/events/simple/with_create_txn_test.go b/tests/integration/events/simple/with_create_txn_test.go index 962a16e39a..c890792157 100644 --- a/tests/integration/events/simple/with_create_txn_test.go +++ b/tests/integration/events/simple/with_create_txn_test.go @@ -28,7 +28,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { r := d.ExecRequest( ctx, `mutation { - create_Users(data: "{\"name\": \"John\"}") { + create_Users(input: {name: "John"}) { _docID } }`, @@ -43,7 +43,7 @@ func TestEventsSimpleWithCreateWithTxnDiscarded(t *testing.T) { r := d.WithTxn(txn).ExecRequest( ctx, `mutation { - create_Users(data: "{\"name\": \"Shahzad\"}") { + create_Users(input: {name: "Shahzad"}) { _docID } }`, diff --git a/tests/integration/explain/debug/create_test.go b/tests/integration/explain/debug/create_test.go index 029c0eaefe..21d334bb84 100644 --- a/tests/integration/explain/debug/create_test.go +++ b/tests/integration/explain/debug/create_test.go @@ -39,7 +39,7 @@ func TestDebugExplainMutationRequestWithCreate(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - create_Author(data: "{\"name\": \"Shahzad Lone\",\"age\": 27,\"verified\": true}") { + create_Author(input: {name: "Shahzad Lone", age: 27, verified: true}) { name age } @@ -63,7 +63,7 @@ func TestDebugExplainMutationRequestDoesNotCreateDocGivenDuplicate(t *testing.T) testUtils.ExplainRequest{ Request: `mutation @explain(type: debug) { - create_Author(data: "{\"name\": \"Shahzad Lone\",\"age\": 27}") { + create_Author(input: {name: "Shahzad Lone", age: 27}) { name age } diff --git a/tests/integration/explain/debug/update_test.go b/tests/integration/explain/debug/update_test.go index 8c8ed82f0b..d9c190ca0c 100644 --- a/tests/integration/explain/debug/update_test.go +++ b/tests/integration/explain/debug/update_test.go @@ -46,7 +46,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) { _eq: true } 
}, - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -78,7 +78,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingIds(t *testing.T) { "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -107,7 +107,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingId(t *testing.T) { Request: `mutation @explain(type: debug) { update_Author( docID: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -144,7 +144,7 @@ func TestDebugExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) { "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name diff --git a/tests/integration/explain/default/create_test.go b/tests/integration/explain/default/create_test.go index eaecd7879f..dc57671bdd 100644 --- a/tests/integration/explain/default/create_test.go +++ b/tests/integration/explain/default/create_test.go @@ -39,7 +39,7 @@ func TestDefaultExplainMutationRequestWithCreate(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain { - create_Author(data: "{\"name\": \"Shahzad Lone\",\"age\": 27,\"verified\": true}") { + create_Author(input: {name: "Shahzad Lone", age: 27, verified: true}) { name age } @@ -52,8 +52,8 @@ func TestDefaultExplainMutationRequestWithCreate(t *testing.T) { TargetNodeName: "createNode", IncludeChildNodes: false, ExpectedAttributes: dataMap{ - "data": dataMap{ - "age": float64(27), + "input": dataMap{ + "age": int32(27), "name": "Shahzad Lone", "verified": true, }, @@ -77,7 +77,7 @@ func TestDefaultExplainMutationRequestDoesNotCreateDocGivenDuplicate(t *testing. testUtils.ExplainRequest{ Request: `mutation @explain { - create_Author(data: "{\"name\": \"Shahzad Lone\",\"age\": 27}") { + create_Author(input: {name: "Shahzad Lone", age: 27}) { name age } @@ -90,8 +90,8 @@ func TestDefaultExplainMutationRequestDoesNotCreateDocGivenDuplicate(t *testing. 
TargetNodeName: "createNode", IncludeChildNodes: false, ExpectedAttributes: dataMap{ - "data": dataMap{ - "age": float64(27), + "input": dataMap{ + "age": int32(27), "name": "Shahzad Lone", }, }, diff --git a/tests/integration/explain/default/update_test.go b/tests/integration/explain/default/update_test.go index cd2af141c3..e2d52e8b73 100644 --- a/tests/integration/explain/default/update_test.go +++ b/tests/integration/explain/default/update_test.go @@ -46,7 +46,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) _eq: true } }, - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -61,8 +61,8 @@ func TestDefaultExplainMutationRequestWithUpdateUsingBooleanFilter(t *testing.T) TargetNodeName: "updateNode", IncludeChildNodes: false, ExpectedAttributes: dataMap{ - "data": dataMap{ - "age": float64(59), + "input": dataMap{ + "age": int32(59), }, "filter": dataMap{ "verified": dataMap{ @@ -115,7 +115,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -130,8 +130,8 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIds(t *testing.T) { TargetNodeName: "updateNode", IncludeChildNodes: false, ExpectedAttributes: dataMap{ - "data": dataMap{ - "age": float64(59), + "input": dataMap{ + "age": int32(59), }, "filter": nil, "docIDs": []string{ @@ -180,7 +180,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { Request: `mutation @explain { update_Author( docID: "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -195,8 +195,8 @@ func TestDefaultExplainMutationRequestWithUpdateUsingId(t *testing.T) { TargetNodeName: "updateNode", IncludeChildNodes: false, ExpectedAttributes: dataMap{ - "data": dataMap{ - "age": float64(59), + "input": dataMap{ + "age": int32(59), }, "filter": nil, "docIDs": []string{ @@ -248,7 +248,7 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) "bae-bfbfc89c-0d63-5ea4-81a3-3ebd295be67f", "bae-079d0bd8-4b1b-5f5f-bd95-4d915c277f9d" ], - data: "{\"age\": 59}" + input: {age: 59} ) { _docID name @@ -263,8 +263,8 @@ func TestDefaultExplainMutationRequestWithUpdateUsingIdsAndFilter(t *testing.T) TargetNodeName: "updateNode", IncludeChildNodes: false, ExpectedAttributes: dataMap{ - "data": dataMap{ - "age": float64(59), + "input": dataMap{ + "age": int32(59), }, "filter": dataMap{ "verified": dataMap{ diff --git a/tests/integration/explain/execute/create_test.go b/tests/integration/explain/execute/create_test.go index bd99ab39a4..58736edb90 100644 --- a/tests/integration/explain/execute/create_test.go +++ b/tests/integration/explain/execute/create_test.go @@ -27,7 +27,7 @@ func TestExecuteExplainMutationRequestWithCreate(t *testing.T) { testUtils.ExplainRequest{ Request: `mutation @explain(type: execute) { - create_Author(data: "{\"name\": \"Shahzad Lone\",\"age\": 27,\"verified\": true}") { + create_Author(input: {name: "Shahzad Lone", age: 27, verified: true}) { name } }`, diff --git a/tests/integration/explain/execute/update_test.go b/tests/integration/explain/execute/update_test.go index fa54f7f331..4f7a262136 100644 --- a/tests/integration/explain/execute/update_test.go +++ b/tests/integration/explain/execute/update_test.go @@ -35,7 +35,7 @@ func TestExecuteExplainMutationRequestWithUpdateUsingIDs(t *testing.T) { "bae-c8448e47-6cd1-571f-90bd-364acb80da7b", 
"bae-f01bf83f-1507-5fb5-a6a3-09ecffa3c692" ], - data: "{\"country\": \"USA\"}" + input: {country: "USA"} ) { country city @@ -93,7 +93,7 @@ func TestExecuteExplainMutationRequestWithUpdateUsingFilter(t *testing.T) { _eq: "Waterloo" } }, - data: "{\"country\": \"USA\"}" + input: {country: "USA"} ) { country city diff --git a/tests/integration/gql.go b/tests/integration/gql.go new file mode 100644 index 0000000000..22a368adf7 --- /dev/null +++ b/tests/integration/gql.go @@ -0,0 +1,68 @@ +// Copyright 2023 Democratized Data Foundation +// +// Use of this software is governed by the Business Source License +// included in the file licenses/BSL.txt. +// +// As of the Change Date specified in that file, in accordance with +// the Business Source License, use of this software will be governed +// by the Apache License, Version 2.0, included in the file +// licenses/APL.txt. + +package tests + +import ( + "encoding/json" + "fmt" + "strings" +) + +// jsonToGql transforms a json doc string to a gql string. +func jsonToGQL(val string) (string, error) { + var doc map[string]any + if err := json.Unmarshal([]byte(val), &doc); err != nil { + return "", err + } + return mapToGQL(doc) +} + +// valueToGQL transforms a value to a gql string. +func valueToGQL(val any) (string, error) { + switch t := val.(type) { + case map[string]any: + return mapToGQL(t) + + case []any: + return sliceToGQL(t) + } + out, err := json.Marshal(val) + if err != nil { + return "", err + } + return string(out), nil +} + +// mapToGql transforms a map to a gql string. +func mapToGQL(val map[string]any) (string, error) { + var entries []string + for k, v := range val { + out, err := valueToGQL(v) + if err != nil { + return "", err + } + entries = append(entries, fmt.Sprintf("%s: %s", k, out)) + } + return fmt.Sprintf("{%s}", strings.Join(entries, ",")), nil +} + +// sliceToGQL transforms a slice to a gql string. 
+func sliceToGQL(val []any) (string, error) { + var entries []string + for _, v := range val { + out, err := valueToGQL(v) + if err != nil { + return "", err + } + entries = append(entries, out) + } + return fmt.Sprintf("[%s]", strings.Join(entries, ",")), nil +} diff --git a/tests/integration/mutation/create/field_kinds/one_to_many/utils.go b/tests/integration/mutation/create/field_kinds/one_to_many/utils.go index 21b9524567..c4ef949e53 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/utils.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/utils.go @@ -20,7 +20,8 @@ func executeTestCase(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ - Description: test.Description, + Description: test.Description, + SupportedMutationTypes: test.SupportedMutationTypes, Actions: append( []any{ testUtils.SchemaUpdate{ diff --git a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go index 3b37756b6c..27ddcf0e68 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_alias_test.go @@ -14,12 +14,20 @@ import ( "fmt" "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" ) func TestMutationCreateOneToMany_AliasedRelationNameWithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, with an invalid field, with alias.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ Doc: `{ @@ -36,6 +44,12 @@ func TestMutationCreateOneToMany_AliasedRelationNameWithInvalidField_Error(t *te func TestMutationCreateOneToMany_AliasedRelationNameNonExistingRelationSingleSide_NoIDFieldError(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, non-existing id, from the single side, no id relation field, with alias.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 0, diff --git a/tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go index e72d7d218e..2a8b64d1b1 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_many/with_simple_test.go @@ -15,11 +15,19 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) func TestMutationCreateOneToMany_WithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, with an invalid field.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ Doc: `{ @@ -36,6 +44,12 @@ func 
TestMutationCreateOneToMany_WithInvalidField_Error(t *testing.T) { func TestMutationCreateOneToMany_NonExistingRelationSingleSide_NoIDFieldError(t *testing.T) { test := testUtils.TestCase{ Description: "One to many create mutation, non-existing id, from the single side, no id relation field.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 0, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/utils.go b/tests/integration/mutation/create/field_kinds/one_to_one/utils.go index 4b5d33f618..8cd920a063 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/utils.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/utils.go @@ -20,7 +20,8 @@ func executeTestCase(t *testing.T, test testUtils.TestCase) { testUtils.ExecuteTestCase( t, testUtils.TestCase{ - Description: test.Description, + Description: test.Description, + SupportedMutationTypes: test.SupportedMutationTypes, Actions: append( []any{ testUtils.SchemaUpdate{ diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go index da8bd1b7b0..18d4a2e13c 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_alias_test.go @@ -14,12 +14,20 @@ import ( "fmt" "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" ) func TestMutationCreateOneToOne_UseAliasWithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, alias relation, with an invalid field.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go index cf985bfa18..30545d6e7c 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/create/field_kinds/one_to_one/with_simple_test.go @@ -15,11 +15,19 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) func TestMutationCreateOneToOne_WithInvalidField_Error(t *testing.T) { test := testUtils.TestCase{ Description: "One to one create mutation, with an invalid field.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go index accf929402..64272779d3 100644 --- a/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go +++ 
b/tests/integration/mutation/create/field_kinds/one_to_one_to_one/with_txn_test.go @@ -42,7 +42,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - create_Book(data: "{\"name\": \"Book By Website\",\"rating\": 4.0, \"publisher_id\": \"bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4\"}") { + create_Book(input: {name: "Book By Website", rating: 4.0, publisher_id: "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4"}) { _docID } }`, @@ -55,7 +55,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsForward(t *testing. testUtils.Request{ TransactionID: immutable.Some(1), Request: `mutation { - create_Book(data: "{\"name\": \"Book By Online\",\"rating\": 4.0, \"publisher_id\": \"bae-8a381044-9206-51e7-8bc8-dc683d5f2523\"}") { + create_Book(input: {name: "Book By Online", rating: 4.0, publisher_id: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523"}) { _docID } }`, @@ -194,7 +194,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - create_Book(data: "{\"name\": \"Book By Website\",\"rating\": 4.0, \"publisher_id\": \"bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4\"}") { + create_Book(input: {name: "Book By Website", rating: 4.0, publisher_id: "bae-0e7c3bb5-4917-5d98-9fcf-b9db369ea6e4"}) { _docID } }`, @@ -207,7 +207,7 @@ func TestTransactionalCreationAndLinkingOfRelationalDocumentsBackward(t *testing testUtils.Request{ TransactionID: immutable.Some(1), Request: `mutation { - create_Book(data: "{\"name\": \"Book By Online\",\"rating\": 4.0, \"publisher_id\": \"bae-8a381044-9206-51e7-8bc8-dc683d5f2523\"}") { + create_Book(input: {name: "Book By Online", rating: 4.0, publisher_id: "bae-8a381044-9206-51e7-8bc8-dc683d5f2523"}) { _docID } }`, diff --git a/tests/integration/mutation/create/simple_test.go b/tests/integration/mutation/create/simple_test.go index cedac8c58e..d095fdfc6d 100644 --- a/tests/integration/mutation/create/simple_test.go +++ b/tests/integration/mutation/create/simple_test.go @@ -21,6 +21,12 @@ import ( func TestMutationCreate_GivenNonExistantField_Errors(t *testing.T) { test := testUtils.TestCase{ Description: "Simple create mutation with non existant field", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -132,9 +138,9 @@ func TestMutationCreate_GivenDuplicate_Errors(t *testing.T) { testUtils.ExecuteTestCase(t, test) } -func TestMutationCreate_GivenEmptyData_Errors(t *testing.T) { +func TestMutationCreate_GivenEmptyInput(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple create mutation with empty data param.", + Description: "Simple create mutation with empty input param.", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -145,11 +151,15 @@ func TestMutationCreate_GivenEmptyData_Errors(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_Users(data: "") { + create_Users(input: {}) { _docID } }`, - ExpectedError: "given data payload is empty", + Results: []map[string]any{ + { + "_docID": "bae-524bfa06-849c-5daf-b6df-05c2da80844d", + }, + }, }, }, } diff --git a/tests/integration/mutation/create/with_version_test.go b/tests/integration/mutation/create/with_version_test.go index 9749119c60..b4578786c9 100644 --- 
a/tests/integration/mutation/create/with_version_test.go +++ b/tests/integration/mutation/create/with_version_test.go @@ -29,7 +29,7 @@ func TestMutationCreate_ReturnsVersionCID(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_Users(data: "{\"name\": \"John\"}") { + create_Users(input: {name: "John"}) { _version { cid } diff --git a/tests/integration/mutation/mix/with_txn_test.go b/tests/integration/mutation/mix/with_txn_test.go index 50cbee7809..de45e22fd4 100644 --- a/tests/integration/mutation/mix/with_txn_test.go +++ b/tests/integration/mutation/mix/with_txn_test.go @@ -33,7 +33,7 @@ func TestMutationWithTxnDeletesUserGivenSameTransaction(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27}") { + create_User(input: {name: "John", age: 27}) { _docID } }`, @@ -77,7 +77,7 @@ func TestMutationWithTxnDoesNotDeletesUserGivenDifferentTransactions(t *testing. testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27}") { + create_User(input: {name: "John", age: 27}) { _docID } }`, @@ -151,7 +151,7 @@ func TestMutationWithTxnDoesUpdateUserGivenSameTransactions(t *testing.T) { testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - update_User(data: "{\"age\": 28}") { + update_User(input: {age: 28}) { _docID } }`, @@ -205,7 +205,7 @@ func TestMutationWithTxnDoesNotUpdateUserGivenDifferentTransactions(t *testing.T testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - update_User(data: "{\"age\": 28}") { + update_User(input: {age: 28}) { _docID name age @@ -264,7 +264,7 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) testUtils.Request{ TransactionID: immutable.Some(0), Request: `mutation { - update_User(data: "{\"age\": 28}") { + update_User(input: {age: 28}) { _docID name age @@ -281,7 +281,7 @@ func TestMutationWithTxnDoesNotAllowUpdateInSecondTransactionUser(t *testing.T) testUtils.Request{ TransactionID: immutable.Some(1), Request: `mutation { - update_User(data: "{\"age\": 29}") { + update_User(input: {age: 29}) { _docID name age diff --git a/tests/integration/mutation/special/invalid_operation_test.go b/tests/integration/mutation/special/invalid_operation_test.go index 1694a37c67..1862d11c7c 100644 --- a/tests/integration/mutation/special/invalid_operation_test.go +++ b/tests/integration/mutation/special/invalid_operation_test.go @@ -29,7 +29,7 @@ func TestMutationInvalidMutation(t *testing.T) { }, testUtils.Request{ Request: `mutation { - dostuff_User(data: "") { + dostuff_User(input: {}) { _docID } }`, diff --git a/tests/integration/mutation/update/crdt/pncounter_test.go b/tests/integration/mutation/update/crdt/pncounter_test.go index fb5f30613e..f8ede1cffc 100644 --- a/tests/integration/mutation/update/crdt/pncounter_test.go +++ b/tests/integration/mutation/update/crdt/pncounter_test.go @@ -15,6 +15,8 @@ import ( "math" "testing" + "github.com/sourcenetwork/immutable" + testUtils "github.com/sourcenetwork/defradb/tests/integration" ) @@ -72,6 +74,12 @@ func TestPNCounterUpdate_IntKindWithPositiveIncrement_ShouldIncrement(t *testing func TestPNCounterUpdate_IntKindWithPositiveIncrementOverflow_RollsOverToMinInt64(t *testing.T) { test := testUtils.TestCase{ Description: "Positive increments of a PN Counter with Int type causing overflow behaviour", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will 
return an error + // when integer type overflows + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/mutation/update/field_kinds/date_time_test.go b/tests/integration/mutation/update/field_kinds/date_time_test.go index 46dddaffa0..b7d1546864 100644 --- a/tests/integration/mutation/update/field_kinds/date_time_test.go +++ b/tests/integration/mutation/update/field_kinds/date_time_test.go @@ -85,7 +85,7 @@ func TestMutationUpdate_WithDateTimeField_MultipleDocs(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(data: "{\"created_at\": \"2031-07-23T03:23:23Z\"}") { + update_Users(input: {created_at: "2031-07-23T03:23:23Z"}) { name created_at } diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go index 882fddd891..dda55ffcfa 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/simple_test.go @@ -15,6 +15,8 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing.T) { @@ -23,6 +25,12 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromSingleSide_Error(t *testing test := testUtils.TestCase{ Description: "One to many update mutation using relation id from single side (wrong)", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, @@ -143,6 +151,12 @@ func TestMutationUpdateOneToMany_RelationIDToLinkFromManySideWithWrongField_Erro test := testUtils.TestCase{ Description: "One to many update mutation using relation id from many side, with a wrong field.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go index 6f4373976f..751ca67b78 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_many/with_alias_test.go @@ -262,6 +262,12 @@ func TestMutationUpdateOneToMany_AliasRelationNameToLinkFromManySideWithWrongFie test := testUtils.TestCase{ Description: "One to many update mutation using relation alias name from many side, with a wrong field.", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.CreateDoc{ CollectionID: 1, diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go index 67d5f0b38c..fdb8928964 100644 --- 
a/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_alias_test.go @@ -236,7 +236,7 @@ func TestMutationUpdateOneToOne_AliasRelationNameToLinkFromSecondarySideWithWron }`, author2ID, ), - ExpectedError: "The given field does not exist. Name: notName", + ExpectedError: "Unknown field.", }, }, } diff --git a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go index 5b0980baab..6d38a9914d 100644 --- a/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go +++ b/tests/integration/mutation/update/field_kinds/one_to_one/with_simple_test.go @@ -14,9 +14,9 @@ import ( "fmt" "testing" - "github.com/sourcenetwork/immutable" - testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) // Note: This test should probably not pass, as it contains a @@ -420,7 +420,7 @@ func TestMutationUpdateOneToOne_RelationIDToLinkFromSecondarySideWithWrongField_ }`, author2ID, ), - ExpectedError: "The given field does not exist. Name: notName", + ExpectedError: "In field \"notName\": Unknown field.", }, }, } diff --git a/tests/integration/mutation/update/with_filter_test.go b/tests/integration/mutation/update/with_filter_test.go index d7b3ae9dde..1819a8ba0c 100644 --- a/tests/integration/mutation/update/with_filter_test.go +++ b/tests/integration/mutation/update/with_filter_test.go @@ -37,7 +37,7 @@ func TestMutationUpdate_WithBooleanFilter_ResultFilteredOut(t *testing.T) { testUtils.Request{ // The update will result in a record that no longer matches the filter Request: `mutation { - update_Users(filter: {verified: {_eq: true}}, data: "{\"verified\":false}") { + update_Users(filter: {verified: {_eq: true}}, input: {verified: false}) { _docID name verified @@ -88,7 +88,7 @@ func TestMutationUpdate_WithBooleanFilter(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(filter: {verified: {_eq: true}}, data: "{\"points\": 59}") { + update_Users(filter: {verified: {_eq: true}}, input: {points: 59}) { name points } diff --git a/tests/integration/mutation/update/with_id_test.go b/tests/integration/mutation/update/with_id_test.go index ddc0fe7128..899711a1ab 100644 --- a/tests/integration/mutation/update/with_id_test.go +++ b/tests/integration/mutation/update/with_id_test.go @@ -43,7 +43,7 @@ func TestMutationUpdate_WithId(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(docID: "bae-cc36febf-4029-52b3-a876-c99c6293f588", data: "{\"points\": 59}") { + update_Users(docID: "bae-cc36febf-4029-52b3-a876-c99c6293f588", input: {points: 59}) { name points } @@ -82,7 +82,7 @@ func TestMutationUpdate_WithNonExistantId(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(docID: "bae-does-not-exist", data: "{\"points\": 59}") { + update_Users(docID: "bae-does-not-exist", input: {points: 59}) { _docID name points diff --git a/tests/integration/mutation/update/with_ids_test.go b/tests/integration/mutation/update/with_ids_test.go index d1d7645829..59f4e7ac73 100644 --- a/tests/integration/mutation/update/with_ids_test.go +++ b/tests/integration/mutation/update/with_ids_test.go @@ -52,7 +52,7 @@ func TestMutationUpdate_WithIds(t *testing.T) { Request: `mutation { update_Users( docIDs: ["bae-cc36febf-4029-52b3-a876-c99c6293f588", "bae-3ac659d1-521a-5eba-a833-5c58b151ca72"], - data: "{\"points\": 59}" + input: 
{points: 59} ) { name points diff --git a/tests/integration/query/one_to_many/with_id_field_test.go b/tests/integration/query/one_to_many/with_id_field_test.go index 0a26cc17ff..8a16f1c49a 100644 --- a/tests/integration/query/one_to_many/with_id_field_test.go +++ b/tests/integration/query/one_to_many/with_id_field_test.go @@ -14,12 +14,20 @@ import ( "testing" testUtils "github.com/sourcenetwork/defradb/tests/integration" + + "github.com/sourcenetwork/immutable" ) // This documents unwanted behaviour, see https://github.com/sourcenetwork/defradb/issues/1520 func TestQueryOneToManyWithIdFieldOnPrimary(t *testing.T) { test := testUtils.TestCase{ Description: "One-to-many relation primary direction, id field with name clash on primary side", + SupportedMutationTypes: immutable.Some([]testUtils.MutationType{ + // GQL mutation will return a different error + // when field types do not match + testUtils.CollectionNamedMutationType, + testUtils.CollectionSaveMutationType, + }), Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index 9fbf2b914a..1c5c8e87a9 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -62,7 +62,7 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_Users(data: "{\"name\":\"Johnnnn\"}") { + update_Users(input: {name: "Johnnnn"}) { name verified } diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go index fb14d6ef30..b0ee08bb80 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_array_test.go @@ -469,7 +469,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_Users(data: "{\"name\": \"John\"}") { + create_Users(input: {name: "John"}) { _docID } }`, @@ -481,7 +481,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_Succeeds(t *testing.T) { }, testUtils.Request{ Request: fmt.Sprintf(`mutation { - create_Users(data: "{\"name\": \"Keenan\", \"foo\": \"%s\"}") { + create_Users(input: {name: "Keenan", foo: "%s"}) { name foo { name @@ -652,7 +652,7 @@ func TestSchemaUpdatesAddFieldKindForeignObjectArray_SingleSecondaryObjectKindSu CollectionID: 0, Doc: fmt.Sprintf(`{ "name": "Keenan", - "foo": "%s" + "foo_id": "%s" }`, key1, ), diff --git a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go index abaa1d4564..dc724d5af7 100644 --- a/tests/integration/schema/updates/add/field/kind/foreign_object_test.go +++ b/tests/integration/schema/updates/add/field/kind/foreign_object_test.go @@ -469,7 +469,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_Users(data: "{\"name\": \"John\"}") { + create_Users(input: {name: "John"}) { _docID } }`, @@ -481,7 +481,7 @@ func TestSchemaUpdatesAddFieldKindForeignObject_Succeeds(t *testing.T) { }, testUtils.Request{ Request: fmt.Sprintf(`mutation { - create_Users(data: "{\"name\": \"Keenan\", \"foo\": \"%s\"}") { + create_Users(input: {name: "Keenan", foo: "%s"}) { name foo { name diff 
--git a/tests/integration/subscription/subscription_test.go b/tests/integration/subscription/subscription_test.go index 947330fbd0..02ac058c90 100644 --- a/tests/integration/subscription/subscription_test.go +++ b/tests/integration/subscription/subscription_test.go @@ -43,7 +43,7 @@ func TestSubscriptionWithCreateMutations(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + create_User(input: {name: "John", age: 27, points: 42.1, verified: true}) { name } }`, @@ -55,7 +55,7 @@ func TestSubscriptionWithCreateMutations(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { + create_User(input: {name: "Addo", age: 31, points: 42.1, verified: true}) { name } }`, @@ -93,7 +93,7 @@ func TestSubscriptionWithFilterAndOneCreateMutation(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + create_User(input: {name: "John", age: 27, points: 42.1, verified: true}) { name } }`, @@ -125,7 +125,7 @@ func TestSubscriptionWithFilterAndOneCreateMutationOutsideFilter(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + create_User(input: {name: "John", age: 27, points: 42.1, verified: true}) { name } }`, @@ -163,7 +163,7 @@ func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_User(data: "{\"name\": \"John\",\"age\": 27,\"points\": 42.1,\"verified\": true}") { + create_User(input: {name: "John", age: 27, points: 42.1, verified: true}) { name } }`, @@ -175,7 +175,7 @@ func TestSubscriptionWithFilterAndCreateMutations(t *testing.T) { }, testUtils.Request{ Request: `mutation { - create_User(data: "{\"name\": \"Addo\",\"age\": 31,\"points\": 42.1,\"verified\": true}") { + create_User(input: {name: "Addo", age: 31, points: 42.1, verified: true}) { name } }`, @@ -233,7 +233,7 @@ func TestSubscriptionWithUpdateMutations(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_User(filter: {name: {_eq: "John"}}, data: "{\"points\": 45}") { + update_User(filter: {name: {_eq: "John"}}, input: {points: 45}) { name } }`, @@ -297,7 +297,7 @@ func TestSubscriptionWithUpdateAllMutations(t *testing.T) { }, testUtils.Request{ Request: `mutation { - update_User(data: "{\"points\": 55}") { + update_User(input: {points: 55}) { name } }`, diff --git a/tests/integration/utils2.go b/tests/integration/utils2.go index 9589cc1a9e..dc344d49f7 100644 --- a/tests/integration/utils2.go +++ b/tests/integration/utils2.go @@ -12,7 +12,6 @@ package tests import ( "context" - "encoding/json" "fmt" "os" "reflect" @@ -1147,17 +1146,17 @@ func createDocViaGQL( ) (*client.Document, error) { collection := collections[action.CollectionID] - escapedJson, err := json.Marshal(action.Doc) + input, err := jsonToGQL(action.Doc) require.NoError(s.t, err) request := fmt.Sprintf( `mutation { - create_%s(data: %s) { + create_%s(input: %s) { _docID } }`, collection.Name(), - escapedJson, + input, ) db := getStore(s, node, immutable.None[int](), action.ExpectedError) @@ -1294,18 +1293,18 @@ func updateDocViaGQL( doc := s.documents[action.CollectionID][action.DocID] collection := collections[action.CollectionID] - escapedJson, err := json.Marshal(action.Doc) + input, err := 
jsonToGQL(action.Doc) require.NoError(s.t, err) request := fmt.Sprintf( `mutation { - update_%s(docID: "%s", data: %s) { + update_%s(docID: "%s", input: %s) { _docID } }`, collection.Name(), doc.ID().String(), - escapedJson, + input, ) db := getStore(s, node, immutable.None[int](), action.ExpectedError) From 67a4552ffa953c003c8ab239a29425482cf62e36 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 11 Jan 2024 10:43:56 -0500 Subject: [PATCH 47/60] fix(i): Handle field in view query but not view SDL (#2192) ## Relevant issue(s) Resolves #2183 #2189 ## Description Handle field in view query but not view SDL. As such fields cannot be queried by the user anyway, we can safely ignore them (instead of panicking). --- core/doc.go | 15 +++ planner/view.go | 5 +- .../view/one_to_many/simple_test.go | 64 +++++++++++ .../view/one_to_many/with_count_test.go | 55 ++++++++++ .../{simple_test_test.go => simple_test.go} | 0 tests/integration/view/simple/simple_test.go | 100 ++++++++++++++++++ 6 files changed, 238 insertions(+), 1 deletion(-) rename tests/integration/view/one_to_one/{simple_test_test.go => simple_test.go} (100%) diff --git a/core/doc.go b/core/doc.go index 2a149dccc5..379ac79bf9 100644 --- a/core/doc.go +++ b/core/doc.go @@ -180,6 +180,21 @@ func (mapping *DocumentMapping) SetFirstOfName(d *Doc, name string, value any) { d.Fields[mapping.IndexesByName[name][0]] = value } +// TrySetFirstOfName overwrites the first field of this name with the given value. +// +// Will return false if the field does not exist, otherwise will return true. +func (mapping *DocumentMapping) TrySetFirstOfName(d *Doc, name string, value any) bool { + if indexes, ok := mapping.IndexesByName[name]; ok && len(indexes) > 0 { + index := indexes[0] + // Panicking here should be impossible unless there is something very wrong in + // the mapper code. + d.Fields[index] = value + return true + } + + return false +} + // FirstOfName returns the value of the first field of the given name. // // Will panic if the field does not exist (but not if it's value is default). diff --git a/planner/view.go b/planner/view.go index 7050469ffd..48a026f306 100644 --- a/planner/view.go +++ b/planner/view.go @@ -69,7 +69,10 @@ func (n *viewNode) Value() core.Doc { // will take into account any aliases defined in the base query. doc := n.docMapper.documentMapping.NewDoc() for fieldName, fieldValue := range sourceValue { - n.docMapper.documentMapping.SetFirstOfName(&doc, fieldName, fieldValue) + // If the field does not exist, ignore it and continue. It likely means that + // the field was declared in the query but not the SDL, and if it is not in the + // SDL it cannot be requested/rendered by the user and would be dropped later anyway.
+ _ = n.docMapper.documentMapping.TrySetFirstOfName(&doc, fieldName, fieldValue) } return doc diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go index 02bb7cb8a5..249254c585 100644 --- a/tests/integration/view/one_to_many/simple_test.go +++ b/tests/integration/view/one_to_many/simple_test.go @@ -239,3 +239,67 @@ func TestView_OneToManyOuterToInnerToOuter_Errors(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_OneToManyWithRelationInQueryButNotInSDL(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with relation in query but not SDL", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + // Query books via author but do not declare relation in SDL + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: `query { + AuthorView { + name + } + }`, + Results: []map[string]any{ + { + "name": "Harper Lee", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_many/with_count_test.go b/tests/integration/view/one_to_many/with_count_test.go index ffc6b4cacd..256b2057bb 100644 --- a/tests/integration/view/one_to_many/with_count_test.go +++ b/tests/integration/view/one_to_many/with_count_test.go @@ -156,3 +156,58 @@ func TestView_OneToManyWithAliasedCount(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_OneToManyWithCountInQueryButNotSDL(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view with count in query but not sdl", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + _count(books: {}) + } + `, + SDL: ` + type AuthorView { + name: String + } + `, + }, + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.Request{ + Request: ` + query { + AuthorView { + name + } + } + `, + Results: []map[string]any{ + { + "name": "Harper Lee", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/view/one_to_one/simple_test_test.go b/tests/integration/view/one_to_one/simple_test.go similarity index 100% rename from tests/integration/view/one_to_one/simple_test_test.go rename to tests/integration/view/one_to_one/simple_test.go diff --git a/tests/integration/view/simple/simple_test.go b/tests/integration/view/simple/simple_test.go index 0e5aa0f4a7..802e281391 100644 --- a/tests/integration/view/simple/simple_test.go +++ b/tests/integration/view/simple/simple_test.go @@ -160,3 +160,103 @@ func TestView_SimpleWithFieldSubset_ErrorsSelectingExcludedField(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_SimpleWithExtraFieldInViewSDL(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with extra field in SDL", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + 
} + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + // `age` is present in SDL but not the query + SDL: ` + type UserView { + name: String + age: Int + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: `query { + UserView { + name + } + }`, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_SimpleWithExtraFieldInViewQuery(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view with extra field in view query", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + age: Int + } + `, + }, + testUtils.CreateView{ + // `age` is present in the query but not the SDL + Query: ` + User { + name + age + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserView { + name + } + } + `, + Results: []map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 2953a2809fa4d53b754bd0c902ca2bb9bf8b3060 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Thu, 11 Jan 2024 11:38:38 -0500 Subject: [PATCH 48/60] fix(i): Don't build mutation input types for embedded schema (#2193) ## Relevant issue(s) Resolves #2191 ## Description Don't generate mutation input types for embedded schema. --- request/graphql/schema/generate.go | 7 +++ .../view/one_to_many/simple_test.go | 60 +++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index 9e1d67a9c8..87efcbd56e 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -528,6 +528,13 @@ func (g *Generator) buildTypes( // for collection create and update mutation operations. func (g *Generator) buildMutationInputTypes(collections []client.CollectionDefinition) error { for _, c := range collections { + if c.Description.Name == "" { + // If the definition's collection name is empty, this must be a collectionless + // schema, in which case users cannot mutate documents through it and we + // have no need to build mutation input types for it.
+ continue + } + // Copy the loop variable before usage within the loop or it // will be reassigned before the thunk is run // TODO remove when Go 1.22 diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go index 249254c585..1eb4dfb7f2 100644 --- a/tests/integration/view/one_to_many/simple_test.go +++ b/tests/integration/view/one_to_many/simple_test.go @@ -303,3 +303,63 @@ func TestView_OneToManyWithRelationInQueryButNotInSDL(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_OneToManyMultipleViewsWithEmbeddedSchema(t *testing.T) { + test := testUtils.TestCase{ + Description: "Multiple one to many views with embedded schemas", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Book { + name + author { + name + } + } + `, + SDL: ` + type BookView { + name: String + author: AuthorView + } + interface AuthorView { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + Book { + name + author { + name + } + } + `, + SDL: ` + type BookView2 { + name: String + author: AuthorView2 + } + interface AuthorView2 { + name: String + } + `, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 31a2988549f545cd186c75dd46a02f8d07b78f45 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Fri, 12 Jan 2024 10:53:45 -0500 Subject: [PATCH 49/60] fix: Add entropy to counter CRDT type updates (#2186) ## Relevant issue(s) Resolves #2179 ## Description Counters need to be incrementable independently from different nodes. Initially, if two nodes incremented by the same amount from the same synced state, the update CID would be the same and would thus be taken as the same operation when syncing. So, starting from 10, if nodes A and B both increment by 5 simultaneously, the end result would be 15 because they would generate the same CID for the update. We want it to be 20. This PR adds entropy to the PN Counter delta so that a situation like the one described above results in 20. Note that the entropy is only added on update, so that a document can be instantiated on multiple nodes with exactly the same CID. --- core/crdt/pncounter.go | 27 ++++++++- .../i2186-adding-entropy-to-pn-counter.md | 3 + merkle/crdt/pncounter.go | 5 +- .../mutation/create/crdt/pncounter_test.go | 2 + .../state/simple/peer/crdt/pncounter_test.go | 55 +++++++++++++++++++ .../query/simple/with_cid_doc_id_test.go | 17 +++--- 6 files changed, 98 insertions(+), 11 deletions(-) create mode 100644 docs/data_format_changes/i2186-adding-entropy-to-pn-counter.md diff --git a/core/crdt/pncounter.go b/core/crdt/pncounter.go index 5d79e24f19..7d8b02c1a4 100644 --- a/core/crdt/pncounter.go +++ b/core/crdt/pncounter.go @@ -13,6 +13,9 @@ package crdt import ( "bytes" "context" + "crypto/rand" + "math" + "math/big" "github.com/fxamacker/cbor/v2" dag "github.com/ipfs/boxo/ipld/merkledag" @@ -45,6 +48,9 @@ type PNCounterDelta[T Incrementable] struct { DocID []byte FieldName string Priority uint64 + // Nonce is an added randomly generated number that ensures + // that each increment operation is unique. + Nonce int64 // SchemaVersionID is the schema version datastore key at the time of commit. // // It can be used to identify the collection datastructure state at the time of commit.
@@ -108,13 +114,30 @@ func (reg PNCounter[T]) Value(ctx context.Context) ([]byte, error) { } // Set generates a new delta with the supplied value -func (reg PNCounter[T]) Increment(value T) *PNCounterDelta[T] { +func (reg PNCounter[T]) Increment(ctx context.Context, value T) (*PNCounterDelta[T], error) { + // To ensure that the dag block is unique, we add a random number to the delta. + // This is done only on update (if the doc doesn't already exist) to ensure that the + // initial dag block of a document can be reproducible. + exists, err := reg.store.Has(ctx, reg.key.ToPrimaryDataStoreKey().ToDS()) + if err != nil { + return nil, err + } + var nonce int64 + if exists { + r, err := rand.Int(rand.Reader, big.NewInt(math.MaxInt64)) + if err != nil { + return nil, err + } + nonce = r.Int64() + } + return &PNCounterDelta[T]{ DocID: []byte(reg.key.DocID), FieldName: reg.fieldName, Data: value, SchemaVersionID: reg.schemaVersionKey.SchemaVersionId, - } + Nonce: nonce, + }, nil } // Merge implements ReplicatedData interface. diff --git a/docs/data_format_changes/i2186-adding-entropy-to-pn-counter.md b/docs/data_format_changes/i2186-adding-entropy-to-pn-counter.md new file mode 100644 index 0000000000..295e8a1b18 --- /dev/null +++ b/docs/data_format_changes/i2186-adding-entropy-to-pn-counter.md @@ -0,0 +1,3 @@ +# Adding entropy to pn counter delta + +We've added entropy to the pn counter delta by introducing a nonce field. This is causing some of the pn counter tests to have a different CID on updates. \ No newline at end of file diff --git a/merkle/crdt/pncounter.go b/merkle/crdt/pncounter.go index 2cadbee23c..74b7adb156 100644 --- a/merkle/crdt/pncounter.go +++ b/merkle/crdt/pncounter.go @@ -51,7 +51,10 @@ func (mPNC *MerklePNCounter[T]) Save(ctx context.Context, data any) (ipld.Node, if !ok { return nil, 0, NewErrUnexpectedValueType(client.PN_COUNTER, &client.FieldValue{}, data) } - delta := mPNC.reg.Increment(value.Value().(T)) + delta, err := mPNC.reg.Increment(ctx, value.Value().(T)) + if err != nil { + return nil, 0, err + } nd, err := mPNC.clock.AddDAGNode(ctx, delta) return nd, delta.GetPriority(), err } diff --git a/tests/integration/mutation/create/crdt/pncounter_test.go b/tests/integration/mutation/create/crdt/pncounter_test.go index 592e01bebb..2d445bff80 100644 --- a/tests/integration/mutation/create/crdt/pncounter_test.go +++ b/tests/integration/mutation/create/crdt/pncounter_test.go @@ -37,12 +37,14 @@ func TestPNCounterCreate_IntKindWithPositiveValue_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users { + _docID name points } }`, Results: []map[string]any{ { + "_docID": "bae-a688789e-d8a6-57a7-be09-22e005ab79e0", "name": "John", "points": int64(10), }, diff --git a/tests/integration/net/state/simple/peer/crdt/pncounter_test.go b/tests/integration/net/state/simple/peer/crdt/pncounter_test.go index f65f4bd6db..643ba738de 100644 --- a/tests/integration/net/state/simple/peer/crdt/pncounter_test.go +++ b/tests/integration/net/state/simple/peer/crdt/pncounter_test.go @@ -67,3 +67,58 @@ func TestP2PUpdate_WithPNCounter_NoError(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestP2PUpdate_WithPNCounterSimultaneousUpdate_NoError(t *testing.T) { + test := testUtils.TestCase{ + Actions: []any{ + testUtils.RandomNetworkingConfig(), + testUtils.RandomNetworkingConfig(), + testUtils.SchemaUpdate{ + Schema: ` + type Users { + Name: String + Age: Int @crdt(type: "pncounter") + } + `, + }, + testUtils.CreateDoc{ + // Create John on all nodes + Doc: `{ + "Name": 
"John", + "Age": 0 + }`, + }, + testUtils.ConnectPeers{ + SourceNodeID: 0, + TargetNodeID: 1, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(0), + Doc: `{ + "Age": 45 + }`, + }, + testUtils.UpdateDoc{ + NodeID: immutable.Some(1), + Doc: `{ + "Age": 45 + }`, + }, + testUtils.WaitForSync{}, + testUtils.Request{ + Request: `query { + Users { + Age + } + }`, + Results: []map[string]any{ + { + "Age": int64(90), + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} diff --git a/tests/integration/query/simple/with_cid_doc_id_test.go b/tests/integration/query/simple/with_cid_doc_id_test.go index 71e3f18869..4b40c4d76d 100644 --- a/tests/integration/query/simple/with_cid_doc_id_test.go +++ b/tests/integration/query/simple/with_cid_doc_id_test.go @@ -269,9 +269,10 @@ func TestQuerySimpleWithUpdateAndFirstCidAndDocIDAndSchemaVersion(t *testing.T) executeTestCase(t, test) } +// Note: Only the first CID is reproducible given the added entropy to the Counter CRDT type. func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple query with second last cid and docID with pncounter int type", + Description: "Simple query with first cid and docID with pncounter int type", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -300,7 +301,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeiabh6mqnysyrv5phhjikjyl5zgxnpxzxogpip7s7knyujkh7fx3qu", + cid: "bafybeiepi2gpoyshdj2ekdsydhw5itxqmipsh7f6pd6iyoiu6sqsdlj2se", docID: "bae-a688789e-d8a6-57a7-be09-22e005ab79e0" ) { name @@ -310,7 +311,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { Results: []map[string]any{ { "name": "John", - "points": int64(5), + "points": int64(10), }, }, }, @@ -320,9 +321,10 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithIntKind_NoError(t *testing.T) { testUtils.ExecuteTestCase(t, test) } +// Note: Only the first CID is reproducible given the added entropy to the Counter CRDT type. func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { test := testUtils.TestCase{ - Description: "Simple query with second last cid and docID with pncounter and float type", + Description: "Simple query with first cid and docID with pncounter and float type", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` @@ -351,7 +353,7 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { testUtils.Request{ Request: `query { Users ( - cid: "bafybeiaqw6oxeshkvd3ilzzagjy3c6h776l3hqvmz5loq4sokr7tlxkm5m", + cid: "bafybeihjdntxsc75hpnyakog4nnaxakljer7zf7pjybpgntcsg45qmisau", docID: "bae-fa6a97e9-e0e9-5826-8a8c-57775d35e07c" ) { name @@ -360,9 +362,8 @@ func TestCidAndDocIDQuery_ContainsPNCounterWithFloatKind_NoError(t *testing.T) { }`, Results: []map[string]any{ { - "name": "John", - // Note the lack of precision of float types. - "points": 4.8999999999999995, + "name": "John", + "points": 10.2, }, }, }, From bccf4e019babcb85be059813f0ca9eb66884ab55 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Fri, 12 Jan 2024 12:57:06 -0500 Subject: [PATCH 50/60] fix(i): Remove default collection fields from gql view types (#2202) ## Relevant issue(s) Resolves #2199 ## Description Removes default collection fields from gql view types. Views should not automatically have these fields. It will also be impossible to guarantee that they exist once we allow Views to have Lens transforms. 
--- request/graphql/schema/generate.go | 5 +++-- tests/integration/schema/default_fields.go | 6 +++--- .../integration/view/one_to_many/with_introspection_test.go | 4 ++-- tests/integration/view/simple/with_introspection_test.go | 2 +- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/request/graphql/schema/generate.go b/request/graphql/schema/generate.go index 87efcbd56e..1083772d58 100644 --- a/request/graphql/schema/generate.go +++ b/request/graphql/schema/generate.go @@ -417,6 +417,7 @@ func (g *Generator) buildTypes( collection := c fieldDescriptions := collection.Schema.Fields isEmbeddedObject := collection.Description.Name == "" + isViewObject := isEmbeddedObject || collection.Description.BaseQuery != nil var objectName string if isEmbeddedObject { @@ -441,7 +442,7 @@ func (g *Generator) buildTypes( fieldsThunk := (gql.FieldsThunk)(func() (gql.Fields, error) { fields := gql.Fields{} - if !isEmbeddedObject { + if !isViewObject { // automatically add the _docID: ID field to the type fields[request.DocIDFieldName] = &gql.Field{ Description: docIDFieldDescription, @@ -495,7 +496,7 @@ func (g *Generator) buildTypes( Type: gql.NewList(gqlType), } - if !isEmbeddedObject { + if !isViewObject { // add _version field fields[request.VersionFieldName] = &gql.Field{ Description: versionFieldDescription, diff --git a/tests/integration/schema/default_fields.go b/tests/integration/schema/default_fields.go index 97671738fb..23f1697938 100644 --- a/tests/integration/schema/default_fields.go +++ b/tests/integration/schema/default_fields.go @@ -70,9 +70,9 @@ var DefaultFields = concat( aggregateFields, ) -// DefaultEmbeddedObjFields contains the list of fields every -// defra embedded-object should have. -var DefaultEmbeddedObjFields = concat( +// DefaultViewObjFields contains the list of fields every +// defra view-object should have. +var DefaultViewObjFields = concat( fields{ groupField, }, diff --git a/tests/integration/view/one_to_many/with_introspection_test.go b/tests/integration/view/one_to_many/with_introspection_test.go index 895ed03fcb..8a8eeeb12d 100644 --- a/tests/integration/view/one_to_many/with_introspection_test.go +++ b/tests/integration/view/one_to_many/with_introspection_test.go @@ -70,7 +70,7 @@ func TestView_OneToMany_GQLIntrospectionTest(t *testing.T) { ExpectedData: map[string]any{ "__type": map[string]any{ "name": "AuthorView", - "fields": schema.DefaultFields.Append( + "fields": schema.DefaultViewObjFields.Append( schema.Field{ "name": "name", "type": map[string]any{ @@ -112,7 +112,7 @@ func TestView_OneToMany_GQLIntrospectionTest(t *testing.T) { // although aggregates and `_group` should be. // There should also be no `Author` field - the relationship field // should only exist on the parent. 
- "fields": schema.DefaultEmbeddedObjFields.Append( + "fields": schema.DefaultViewObjFields.Append( schema.Field{ "name": "name", "type": map[string]any{ diff --git a/tests/integration/view/simple/with_introspection_test.go b/tests/integration/view/simple/with_introspection_test.go index ada7d2cfcd..c8c45b9e8a 100644 --- a/tests/integration/view/simple/with_introspection_test.go +++ b/tests/integration/view/simple/with_introspection_test.go @@ -58,7 +58,7 @@ func TestView_Simple_GQLIntrospectionTest(t *testing.T) { ExpectedData: map[string]any{ "__type": map[string]any{ "name": "UserView", - "fields": schema.DefaultFields.Append( + "fields": schema.DefaultViewObjFields.Append( schema.Field{ "name": "name", "type": map[string]any{ From f175937666df7d619200b25109aa32cd6f04af94 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 12 Jan 2024 10:42:32 -0800 Subject: [PATCH 51/60] fix(i): Update mutation in readme (#2203) ## Relevant issue(s) N/A ## Description This PR fixes the format of mutations in the readme. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? N/A Specify the platform(s) on which this was tested: - MacOS --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index acc438273b..ed993387f3 100644 --- a/README.md +++ b/README.md @@ -100,7 +100,7 @@ Submit a `mutation` request to create a document of the `User` type: ```shell defradb client query ' mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { + create_User(input: {age: 31, verified: true, points: 90, name: "Bob"}) { _docID } } From 35d5c8756ce0da7cb303f24f66465c00d6e0e807 Mon Sep 17 00:00:00 2001 From: Keenan Nemetz Date: Fri, 12 Jan 2024 14:32:31 -0800 Subject: [PATCH 52/60] fix(i): Update example with typed input (#2205) ## Relevant issue(s) N/A ## Description This PR updates the example create request with the new typed input. ## Tasks - [x] I made sure the code is well commented, particularly hard-to-understand areas. - [x] I made sure the repository-held documentation is changed accordingly. - [x] I made sure the pull request title adheres to the conventional commit style (the subset used in the project can be found in [tools/configs/chglog/config.yml](tools/configs/chglog/config.yml)). - [x] I made sure to discuss its limitations such as threats to validity, vulnerability to mistake and misuse, robustness to invalidation of assumptions, resource requirements, ... ## How has this been tested? 
N/A Specify the platform(s) on which this was tested: - MacOS --- examples/request/user_creation.graphql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/request/user_creation.graphql b/examples/request/user_creation.graphql index 0cab4c6d45..b7b694a009 100644 --- a/examples/request/user_creation.graphql +++ b/examples/request/user_creation.graphql @@ -1,5 +1,5 @@ mutation { - create_User(data: "{\"age\": 31, \"verified\": true, \"points\": 90, \"name\": \"Bob\"}") { + create_User(input: {age: 31, verified: true, points: 90, name: "Bob"}) { _docID } } From 08d8e4af0e7fa7e4b3c5768a6abac8337d9c34d9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 02:16:03 -0800 Subject: [PATCH 53/60] bot: Update dependencies (bulk dependabot PRs) 15-01-2024 (#2217) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ✅ This PR was created by the Combine PRs action by combining the following PRs: #2216 bot: Bump @typescript-eslint/parser from 6.18.0 to 6.18.1 in /playground #2214 bot: Bump swagger-ui-react from 5.10.5 to 5.11.0 in /playground #2211 bot: Bump github.com/multiformats/go-multiaddr from 0.12.0 to 0.12.1 #2210 bot: Bump github.com/ipfs/boxo from 0.16.0 to 0.17.0 ⚠️ The following PRs were left out due to merge conflicts: #2215 bot: Bump @typescript-eslint/eslint-plugin from 6.18.0 to 6.18.1 in /playground #2213 bot: Bump @types/react from 18.2.47 to 18.2.48 in /playground #2209 bot: Bump golang.org/x/crypto from 0.17.0 to 0.18.0 #2207 bot: Bump golang.org/x/net from 0.19.0 to 0.20.0 --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 40 +- go.sum | 86 ++--- playground/package-lock.json | 697 ++++++++++++++++++----------------- playground/package.json | 4 +- 4 files changed, 422 insertions(+), 405 deletions(-) diff --git a/go.mod b/go.mod index d2c1f2aeb4..f9de929232 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,7 @@ require ( github.com/go-errors/errors v1.5.1 github.com/gofrs/uuid/v5 v5.0.0 github.com/iancoleman/strcase v0.3.0 - github.com/ipfs/boxo v0.16.0 + github.com/ipfs/boxo v0.17.0 github.com/ipfs/go-block-format v0.2.0 github.com/ipfs/go-cid v0.4.1 github.com/ipfs/go-datastore v0.6.0 @@ -28,7 +28,7 @@ require ( github.com/libp2p/go-libp2p-pubsub v0.10.0 github.com/libp2p/go-libp2p-record v0.2.0 github.com/mitchellh/mapstructure v1.5.0 - github.com/multiformats/go-multiaddr v0.12.0 + github.com/multiformats/go-multiaddr v0.12.1 github.com/multiformats/go-multibase v0.2.0 github.com/multiformats/go-multihash v0.2.3 github.com/sourcenetwork/badger/v4 v4.2.1-0.20231113215945-a63444ca5276 @@ -46,9 +46,9 @@ require ( go.opentelemetry.io/otel/metric v1.21.0 go.opentelemetry.io/otel/sdk/metric v1.21.0 go.uber.org/zap v1.26.0 - golang.org/x/crypto v0.17.0 - golang.org/x/exp v0.0.0-20231127185646-65229373498e - golang.org/x/net v0.19.0 + golang.org/x/crypto v0.18.0 + golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc + golang.org/x/net v0.20.0 google.golang.org/grpc v1.60.1 google.golang.org/protobuf v1.32.0 ) @@ -70,10 +70,10 @@ require ( github.com/docker/go-units v0.5.0 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/elastic/gosigar v0.14.2 // indirect - github.com/flynn/noise v1.0.0 // indirect + github.com/flynn/noise v1.0.1 // indirect github.com/francoispqt/gojay v1.2.13 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect - 
github.com/go-logr/logr v1.3.0 // indirect + github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/jsonpointer v0.19.6 // indirect github.com/go-openapi/swag v0.22.4 // indirect @@ -85,8 +85,8 @@ require ( github.com/golang/protobuf v1.5.3 // indirect github.com/google/flatbuffers v2.0.6+incompatible // indirect github.com/google/gopacket v1.1.19 // indirect - github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b // indirect - github.com/google/uuid v1.4.0 // indirect + github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 // indirect + github.com/google/uuid v1.5.0 // indirect github.com/gorilla/websocket v1.5.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect @@ -110,13 +110,13 @@ require ( github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect github.com/josharian/intern v1.0.0 // indirect - github.com/klauspost/compress v1.17.2 // indirect + github.com/klauspost/compress v1.17.4 // indirect github.com/klauspost/cpuid/v2 v2.2.6 // indirect github.com/koron/go-ssdp v0.0.4 // indirect github.com/libp2p/go-buffer-pool v0.1.0 // indirect github.com/libp2p/go-cidranger v1.1.0 // indirect github.com/libp2p/go-flow-metrics v0.1.0 // indirect - github.com/libp2p/go-libp2p-asn-util v0.3.0 // indirect + github.com/libp2p/go-libp2p-asn-util v0.4.1 // indirect github.com/libp2p/go-libp2p-kbucket v0.6.3 // indirect github.com/libp2p/go-libp2p-routing-helpers v0.7.3 // indirect github.com/libp2p/go-msgio v0.3.0 // indirect @@ -143,7 +143,7 @@ require ( github.com/multiformats/go-multistream v0.5.0 // indirect github.com/multiformats/go-varint v0.0.7 // indirect github.com/onsi/ginkgo v1.16.5 // indirect - github.com/onsi/ginkgo/v2 v2.13.0 // indirect + github.com/onsi/ginkgo/v2 v2.13.2 // indirect github.com/opencontainers/runtime-spec v1.1.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect @@ -152,13 +152,13 @@ require ( github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/polydawn/refmt v0.89.0 // indirect - github.com/prometheus/client_golang v1.17.0 // indirect + github.com/prometheus/client_golang v1.18.0 // indirect github.com/prometheus/client_model v0.5.0 // indirect github.com/prometheus/common v0.45.0 // indirect github.com/prometheus/procfs v0.12.0 // indirect github.com/quic-go/qpack v0.4.0 // indirect - github.com/quic-go/qtls-go1-20 v0.3.4 // indirect - github.com/quic-go/quic-go v0.39.4 // indirect + github.com/quic-go/qtls-go1-20 v0.4.1 // indirect + github.com/quic-go/quic-go v0.40.1 // indirect github.com/quic-go/webtransport-go v0.6.0 // indirect github.com/raulk/go-watchdog v1.3.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect @@ -180,15 +180,15 @@ require ( go.opentelemetry.io/otel/trace v1.21.0 // indirect go.uber.org/dig v1.17.1 // indirect go.uber.org/fx v1.20.1 // indirect - go.uber.org/mock v0.3.0 // indirect + go.uber.org/mock v0.4.0 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/mod v0.14.0 // indirect - golang.org/x/sync v0.5.0 // indirect - golang.org/x/sys v0.15.0 // indirect + golang.org/x/sync v0.6.0 // indirect + golang.org/x/sys v0.16.0 // indirect golang.org/x/text v0.14.0 // indirect - golang.org/x/tools v0.16.0 // indirect + golang.org/x/tools v0.16.1 // indirect gonum.org/v1/gonum 
v0.14.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect lukechampine.com/blake3 v1.2.1 // indirect diff --git a/go.sum b/go.sum index eca55e42f6..f70ba2aaaf 100644 --- a/go.sum +++ b/go.sum @@ -12,7 +12,7 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/DataDog/zstd v1.4.1 h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM= github.com/Jorropo/jsync v1.0.1 h1:6HgRolFZnsdfzRUj+ImB9og1JYOxQoReSywkHOGSaUU= github.com/Jorropo/jsync v1.0.1/go.mod h1:jCOZj3vrBCri3bSU3ErUYvevKlnbssrXeCivybS5ABQ= -github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc= +github.com/alecthomas/units v0.0.0-20231202071711-9a357b53e9c9 h1:ez/4by2iGztzR4L0zgAOR8lTQK9VlyBVVd7G4omaOQs= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= @@ -48,7 +48,7 @@ github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:ma github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.3 h1:qMCsGGgs+MAzDFyp9LpAe1Lqy/fY/qCovCm0qnXZOBM= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/crackcomm/go-gitignore v0.0.0-20170627025303-887ab5e44cc3 h1:HVTnpeuvF6Owjd5mniCL8DEXo7uYXdQEmOP4FJbV5tg= +github.com/crackcomm/go-gitignore v0.0.0-20231225121904-e25f5bc08668 h1:ZFUue+PNxmHlu7pYv+IYMtqlaO/0VwaGEqKepZf9JpA= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/cskr/pubsub v1.0.2 h1:vlOzMhl6PFn60gRlTQQsIfVwaPB/B/8MziK8FhEPt/0= github.com/cskr/pubsub v1.0.2/go.mod h1:/8MzYXk/NJAz782G8RPkFzXTZVu63VotefPnR9TIRis= @@ -83,8 +83,8 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7 github.com/evanphx/json-patch/v5 v5.7.0 h1:nJqP7uwL84RJInrohHfW0Fx3awjbm8qZeFv0nW9SYGc= github.com/evanphx/json-patch/v5 v5.7.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= -github.com/flynn/noise v1.0.0 h1:DlTHqmzmvcEiKj+4RYo/imoswx/4r6iBlCMfVtrMXpQ= -github.com/flynn/noise v1.0.0/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag= +github.com/flynn/noise v1.0.1 h1:vPp/jdQLXC6ppsXSj/pM3W1BIJ5FEHE2TulSJBpb43Y= +github.com/flynn/noise v1.0.1/go.mod h1:xbMo+0i6+IGbYdJhF31t2eR1BIU0CYc12+BNAKwUTag= github.com/francoispqt/gojay v1.2.13 h1:d2m3sFjloqoIUQU3TsHBgj6qg/BVGlTBeHDUmyJnXKk= github.com/francoispqt/gojay v1.2.13/go.mod h1:ehT5mTG4ua4581f1++1WLG0vPdaA9HaiDsoyrBGkyDY= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= @@ -106,8 +106,8 @@ github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm github.com/go-errors/errors v1.5.1 h1:ZwEMSLRCapFLflTpT7NKaAc7ukJ8ZPEjzlxt8rPN8bk= github.com/go-errors/errors v1.5.1/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= -github.com/go-logr/logr v1.3.0 
h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY= -github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= +github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= @@ -173,12 +173,12 @@ github.com/google/gopacket v1.1.19 h1:ves8RnFZPGiFnTS0uPQStjwru6uO6h+nlr9j6fL7kF github.com/google/gopacket v1.1.19/go.mod h1:iJ8V8n6KS+z2U1A8pUwu8bW5SyEMkXJB8Yo/Vo+TKTo= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b h1:RMpPgZTSApbPf7xaVel+QkoGPRLFLrwFO89uDUHEGf0= -github.com/google/pprof v0.0.0-20231023181126-ff6d637d2a7b/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= +github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42 h1:dHLYa5D8/Ta0aLR2XcPsrkpAgGeFs6thhMcQK0oQ0n8= +github.com/google/pprof v0.0.0-20231229205709-960ae82b1e42/go.mod h1:czg5+yv1E0ZGTi6S6vVK1mke0fV+FaUhNGcd6VRS9Ik= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4= -github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= +github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -212,8 +212,8 @@ github.com/invopop/yaml v0.2.0 h1:7zky/qH+O0DwAyoobXUqvVBwgBFRxKoQ/3FjcVpjTMY= github.com/invopop/yaml v0.2.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q= github.com/ipfs/bbloom v0.0.4 h1:Gi+8EGJ2y5qiD5FbsbpX/TMNcJw8gSqr7eyjHa4Fhvs= github.com/ipfs/bbloom v0.0.4/go.mod h1:cS9YprKXpoZ9lT0n/Mw/a6/aFV6DTjTLYHeA+gyqMG0= -github.com/ipfs/boxo v0.16.0 h1:A9dUmef5a+mEFki6kbyG7el5gl65CiUBzrDeZxzTWKY= -github.com/ipfs/boxo v0.16.0/go.mod h1:jAgpNQn7T7BnibUeReXcKU9Ha1xmYNyOlwVEl193ow0= +github.com/ipfs/boxo v0.17.0 h1:fVXAb12dNbraCX1Cdid5BB6Kl62gVLNVA+e0EYMqAU0= +github.com/ipfs/boxo v0.17.0/go.mod h1:pIZgTWdm3k3pLF9Uq6MB8JEcW07UDwNJjlXW1HELW80= github.com/ipfs/go-bitfield v1.1.0 h1:fh7FIo8bSwaJEh6DdTWbCeZ1eqOaOkKFI74SCnsWbGA= github.com/ipfs/go-block-format v0.2.0 h1:ZqrkxBA2ICbDRbK8KJs/u0O3dlp6gmAuuXUJNiW1Ycs= github.com/ipfs/go-block-format v0.2.0/go.mod h1:+jpL11nFx5A/SPpsoBn6Bzkra/zaArfSmsknbPMYgzM= @@ -269,8 +269,8 @@ github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfV github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= 
-github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4= -github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= +github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= +github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/koron/go-ssdp v0.0.4 h1:1IDwrghSKYM7yLf7XCzbByg2sJ/JcNOZRXS2jczTwz0= @@ -293,8 +293,8 @@ github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFG github.com/libp2p/go-flow-metrics v0.1.0/go.mod h1:4Xi8MX8wj5aWNDAZttg6UPmc0ZrnFNsMtpsYUClFtro= github.com/libp2p/go-libp2p v0.32.2 h1:s8GYN4YJzgUoyeYNPdW7JZeZ5Ee31iNaIBfGYMAY4FQ= github.com/libp2p/go-libp2p v0.32.2/go.mod h1:E0LKe+diV/ZVJVnOJby8VC5xzHF0660osg71skcxJvk= -github.com/libp2p/go-libp2p-asn-util v0.3.0 h1:gMDcMyYiZKkocGXDQ5nsUQyquC9+H+iLEQHwOCZ7s8s= -github.com/libp2p/go-libp2p-asn-util v0.3.0/go.mod h1:B1mcOrKUE35Xq/ASTmQ4tN3LNzVVaMNmq2NACuqyB9w= +github.com/libp2p/go-libp2p-asn-util v0.4.1 h1:xqL7++IKD9TBFMgnLPZR6/6iYhawHKHl950SO9L6n94= +github.com/libp2p/go-libp2p-asn-util v0.4.1/go.mod h1:d/NI6XZ9qxw67b4e+NgpQexCIiFYJjErASrYW4PFDN8= github.com/libp2p/go-libp2p-gostream v0.6.0 h1:QfAiWeQRce6pqnYfmIVWJFXNdDyfiR/qkCnjyaZUPYU= github.com/libp2p/go-libp2p-gostream v0.6.0/go.mod h1:Nywu0gYZwfj7Jc91PQvbGU8dIpqbQQkjWgDuOrFaRdA= github.com/libp2p/go-libp2p-kad-dht v0.25.2 h1:FOIk9gHoe4YRWXTu8SY9Z1d0RILol0TrtApsMDPjAVQ= @@ -363,8 +363,8 @@ github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9 github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4= github.com/multiformats/go-multiaddr v0.1.1/go.mod h1:aMKBKNEYmzmDmxfX88/vz+J5IU55txyt0p4aiWVohjo= github.com/multiformats/go-multiaddr v0.2.0/go.mod h1:0nO36NvPpyV4QzvTLi/lafl2y95ncPj0vFwVF6k6wJ4= -github.com/multiformats/go-multiaddr v0.12.0 h1:1QlibTFkoXJuDjjYsMHhE73TnzJQl8FSWatk/0gxGzE= -github.com/multiformats/go-multiaddr v0.12.0/go.mod h1:WmZXgObOQOYp9r3cslLlppkrz1FYSHmE834dfz/lWu8= +github.com/multiformats/go-multiaddr v0.12.1 h1:vm+BA/WZA8QZDp1pF1FWhi5CT3g1tbi5GJmqpb6wnlk= +github.com/multiformats/go-multiaddr v0.12.1/go.mod h1:7mPkiBMmLeFipt+nNSq9pHZUeJSt8lHBgH6yhj0YQzE= github.com/multiformats/go-multiaddr-dns v0.3.1 h1:QgQgR+LQVt3NPTjbrLLpsaT2ufAA2y0Mkk+QRVJbW3A= github.com/multiformats/go-multiaddr-dns v0.3.1/go.mod h1:G/245BRQ6FJGmryJCrOuTdB37AMA5AMOVuO6NY3JwTk= github.com/multiformats/go-multiaddr-fmt v0.1.0 h1:WLEFClPycPkp4fnIzoFoV9FVd49/eQsuaL3/CWe167E= @@ -392,12 +392,12 @@ github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108 github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4= -github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o= +github.com/onsi/ginkgo/v2 v2.13.2 h1:Bi2gGVkfn6gQcjNjZJVO8Gf0FHzMPf2phUei9tejVMs= +github.com/onsi/ginkgo/v2 v2.13.2/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega 
v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= +github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg= github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg= github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= @@ -419,8 +419,8 @@ github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH github.com/polydawn/refmt v0.89.0 h1:ADJTApkvkeBZsN0tBTx8QjpD9JkmxbKp0cxfr9qszm4= github.com/polydawn/refmt v0.89.0/go.mod h1:/zvteZs/GwLtCgZ4BL6CBsk9IKIlexP43ObX9AxTqTw= github.com/prometheus/client_golang v0.8.0/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v1.17.0 h1:rl2sfwZMtSthVU752MqfjQozy7blglC+1SOtjMAMh+Q= -github.com/prometheus/client_golang v1.17.0/go.mod h1:VeL+gMmOAxkS2IqfCq0ZmHSL+LjWfWDUmp1mBz9JgUY= +github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= +github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw= @@ -433,10 +433,10 @@ github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/quic-go/qpack v0.4.0 h1:Cr9BXA1sQS2SmDUWjSofMPNKmvF6IiIfDRmgU0w1ZCo= github.com/quic-go/qpack v0.4.0/go.mod h1:UZVnYIfi5GRk+zI9UMaCPsmZ2xKJP7XBUvVyT1Knj9A= -github.com/quic-go/qtls-go1-20 v0.3.4 h1:MfFAPULvst4yoMgY9QmtpYmfij/em7O8UUi+bNVm7Cg= -github.com/quic-go/qtls-go1-20 v0.3.4/go.mod h1:X9Nh97ZL80Z+bX/gUXMbipO6OxdiDi58b/fMC9mAL+k= -github.com/quic-go/quic-go v0.39.4 h1:PelfiuG7wXEffUT2yceiqz5V6Pc0TA5ruOd1LcmFc1s= -github.com/quic-go/quic-go v0.39.4/go.mod h1:T09QsDQWjLiQ74ZmacDfqZmhY/NLnw5BC40MANNNZ1Q= +github.com/quic-go/qtls-go1-20 v0.4.1 h1:D33340mCNDAIKBqXuAvexTNMUByrYmFYVfKfDN5nfFs= +github.com/quic-go/qtls-go1-20 v0.4.1/go.mod h1:X9Nh97ZL80Z+bX/gUXMbipO6OxdiDi58b/fMC9mAL+k= +github.com/quic-go/quic-go v0.40.1 h1:X3AGzUNFs0jVuO3esAGnTfvdgvL4fq655WaOi1snv1Q= +github.com/quic-go/quic-go v0.40.1/go.mod h1:PeN7kuVJ4xZbxSv/4OX6S1USOX8MJvydwpTx31vx60c= github.com/quic-go/webtransport-go v0.6.0 h1:CvNsKqc4W2HljHJnoT+rMmbRJybShZ0YPFDD3NxaZLY= github.com/quic-go/webtransport-go v0.6.0/go.mod h1:9KjU4AEBqEQidGHNDkZrb8CAa1abRaosM2yGOyiikEc= github.com/raulk/go-watchdog v1.3.0 h1:oUmdlHxdkXRJlwfG0O9omj8ukerm8MEQavSiDTEtBsk= @@ -575,8 +575,8 @@ go.uber.org/fx v1.20.1 h1:zVwVQGS8zYvhh9Xxcu4w1M6ESyeMzebzj2NbSayZ4Mk= go.uber.org/fx v1.20.1/go.mod h1:iSYNbHf2y55acNCwCXKx7LbWb5WG1Bnue5RDXz1OREg= go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= -go.uber.org/mock v0.3.0 h1:3mUxI1No2/60yUYax92Pt8eNOEecx2D3lcXZh2NEZJo= -go.uber.org/mock v0.3.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= 
+go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= +go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= @@ -597,11 +597,11 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= -golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= +golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20231127185646-65229373498e h1:Gvh4YaCaXNs6dKTlfgismwWZKyjVZXwOPfIyUaqU3No= -golang.org/x/exp v0.0.0-20231127185646-65229373498e/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= +golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc h1:ao2WRsKSzW6KuUY9IWPwWahcHCgR0s52IfwutMfEbdM= +golang.org/x/exp v0.0.0-20240103183307-be819d1f06fc/go.mod h1:iRJReGqOEeBhDZGkGbynYwcHlctCvnjTYIamk7uXpHI= golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -634,8 +634,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= +golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= +golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181017192945-9dcd33a902f4/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20181203162652-d668ce993890/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -649,8 +649,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.5.0 
h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ= +golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180810173357-98c5dad5d1a0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -677,8 +677,8 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= -golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= +golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -706,8 +706,8 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.16.0 h1:GO788SKMRunPIBCXiQyo2AaexLstOrVhuAL5YwsckQM= -golang.org/x/tools v0.16.0/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= +golang.org/x/tools v0.16.1 h1:TLyB3WofjdOEepBHAU20JdNC1Zbg87elYofWYAY5oZA= +golang.org/x/tools v0.16.1/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -728,8 +728,8 @@ google.golang.org/genproto v0.0.0-20181202183823-bd91e49a0898/go.mod h1:7Ep/1NZk google.golang.org/genproto v0.0.0-20190306203927-b5d61aea6440/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f h1:ultW7fxlIvee4HYrtnaRPon9HpEgFk5zYpmfMgtKB5I= -google.golang.org/genproto/googleapis/rpc v0.0.0-20231120223509-83a465c0220f/go.mod h1:L9KNLi232K1/xB6f7AlSX692koaRnKaWSR0stBki0Yc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 h1:gphdwh0npgs8elJ4T6J+DQJHPVF7RsuJHCfwztUb4J4= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1/go.mod 
h1:daQN87bsDqDoe316QbbvX60nMoJQa4r6Ds0ZuoAe5yA= google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.16.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio= google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= diff --git a/playground/package-lock.json b/playground/package-lock.json index aa74ece173..323e6f16d0 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -12,14 +12,14 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.10.5" + "swagger-ui-react": "^5.11.0" }, "devDependencies": { "@types/react": "^18.2.47", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.18.0", - "@typescript-eslint/parser": "^6.18.0", + "@typescript-eslint/parser": "^6.18.1", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", @@ -49,9 +49,9 @@ } }, "node_modules/@babel/runtime-corejs3": { - "version": "7.23.6", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.23.6.tgz", - "integrity": "sha512-Djs/ZTAnpyj0nyg7p1J6oiE/tZ9G2stqAFlLGZynrW+F3k2w2jGK2mLOBxzYIOcZYA89+c3d3wXKpYLcpwcU6w==", + "version": "7.23.8", + "resolved": "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.23.8.tgz", + "integrity": "sha512-2ZzmcDugdm0/YQKFVYsXiwUN7USPX8PM7cytpb4PFl87fM+qYPSvTZX//8tyeJB1j0YDmafBJEbl5f8NfLyuKw==", "dependencies": { "core-js-pure": "^3.30.2", "regenerator-runtime": "^0.14.0" @@ -61,9 +61,9 @@ } }, "node_modules/@braintree/sanitize-url": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", - "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==" + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.0.0.tgz", + "integrity": "sha512-GMu2OJiTd1HSe74bbJYQnVvELANpYiGFZELyyTM1CR0sdv5ReQAcJ/c/8pIrPab3lO11+D+EpuGLUxqz+y832g==" }, "node_modules/@codemirror/language": { "version": "6.0.0", @@ -1558,12 +1558,12 @@ ] }, "node_modules/@swagger-api/apidom-ast": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.88.0.tgz", - "integrity": "sha512-Gsp2VRWRrekIvxWRV8dEdigRpxGc0PSM+tB7MC1BJJEMZvKzj+xWcU2QcDJLO2/DgBIRvsvtEX8ZfLWqUubT2A==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ast/-/apidom-ast-0.92.0.tgz", + "integrity": "sha512-j9vuKaYZP3mAGXUcKeWIkSToxPPCBLJcLEfjSEh14P0n6NRJp7Yg19SA+IwHdIvOAfJonuebj/lhPOMjzd6P1g==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-error": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1572,13 +1572,13 @@ } }, "node_modules/@swagger-api/apidom-core": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.88.0.tgz", - "integrity": "sha512-Zfeww6tphn1eDaAHhECFEULnBspF0u1J2x1a5x7E3LMS7OuyE+/53xOyN71UAakvX1+K+Cw8UBLxR0yGbFEMow==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-core/-/apidom-core-0.92.0.tgz", + "integrity": "sha512-PK1zlS0UCcE5dIPtSy8/+oWfXAVf7b/iM3LRaPgaFGF5b8qa6S/zmROTh10Yjug9v9Vnuq8opEhyHkGyl+WdSA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.88.0", - 
"@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ast": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", "@types/ramda": "~0.29.6", "minim": "~0.23.8", "ramda": "~0.29.1", @@ -1588,36 +1588,36 @@ } }, "node_modules/@swagger-api/apidom-error": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.88.0.tgz", - "integrity": "sha512-RBhk2rlZn/oi916cgwKj+b/ynHHHabAcVzi0T7VY38JbU+6ab8F+JDbUSvFK42vmCF3/FSnpf7CnIm6TEBuaQA==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-error/-/apidom-error-0.92.0.tgz", + "integrity": "sha512-wo7xCvTpWr5Lpt/ly1L4bhZ6W7grgtAg7SK/d8FNZR85zPJXM4FPMpcRtKktfWJ/RikQJT/g5DjI33iTqB6z/w==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7" } }, "node_modules/@swagger-api/apidom-json-pointer": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.88.0.tgz", - "integrity": "sha512-wGdKNhA5WGwegJ6spTfPxg9te5dyAUDQLArTa0wesFtpVV5cXg9jVifSCmuFHJBTzBHLF3xyAbJNe4luq8QW9Q==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-json-pointer/-/apidom-json-pointer-0.92.0.tgz", + "integrity": "sha512-VmZ1EXE7BWX+ndeeh9t1uFRql5jbPRmAcglUfdtu3jlg6fOqXzzgx9qFpRz9GhpMHWEGFm1ymd8tMAa1CvgcHw==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-ns-api-design-systems": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.88.0.tgz", - "integrity": "sha512-JELQajWJOYGAnx7T3k33v8HQlIkmHmgfiCNarHTCV6i1mImJDRTPTYvvyPMWzzyx9JdQR1u47ZFb7b7I33k7vg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-api-design-systems/-/apidom-ns-api-design-systems-0.92.0.tgz", + "integrity": "sha512-wXEXhw0wDQIPTUqff953h44oQZr29DcoAzZfROWlGtOLItGDDMjhfIYiRg1406mXA4N7d5d0vNi9V/HXkxItQw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1625,14 +1625,14 @@ } }, "node_modules/@swagger-api/apidom-ns-asyncapi-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.88.0.tgz", - "integrity": "sha512-80025KRDyRMgHFSZt8LT1S0wVK6VkzMKec0w4u1vrnjcC9lcAWmV1Ojuur6g2afEbn4Avv9bcUb6uPb9U3NEYA==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-asyncapi-2/-/apidom-ns-asyncapi-2-0.92.0.tgz", + "integrity": "sha512-FmJLT3GqzT4HK7Mwh54cXZ4PZt58yKVtJAKWKJ0dg2/Gim0AKJWf6t6B3Z9ZFUiKyehbqP4K7gSM7qGL0tKe2Q==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-json-schema-draft-7": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-json-schema-draft-7": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": 
"^4.1.1", @@ -1640,13 +1640,13 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-4": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.88.0.tgz", - "integrity": "sha512-m0h+HMUTKZ7MNJMflJwXC0ArFgLntENmIg4pqtPKTcA7Qwij8rJjKzGgHvXBe1ahkVc6uGBulIWOT86hpIxWSg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-4/-/apidom-ns-json-schema-draft-4-0.92.0.tgz", + "integrity": "sha512-7s2EKjCQwRXbK4Y4AGpVkyn1AANCxOUFSHebo1h2katyVeAopV0LJmbXH5yQedTltV0k3BIjnd7hS+7dI846Pw==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.88.0", - "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-ast": "^0.92.0", + "@swagger-api/apidom-core": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1654,15 +1654,15 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-6": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.88.0.tgz", - "integrity": "sha512-ow4rcijuw+DX29Kv4kZS7AaeblmpHv4fxDumKrfv2raQbj4YCew0tK+8LEno4ssIjcHCIOUbbxU1rLAJPtqwyA==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-6/-/apidom-ns-json-schema-draft-6-0.92.0.tgz", + "integrity": "sha512-zur80x04jesXVzlU9sLZhW4giO9RfOouI7L/H8v2wUlcBvjaPBn1tIqrURw2VEHKAcJORhTRusQCR21vnFot2g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1670,15 +1670,15 @@ } }, "node_modules/@swagger-api/apidom-ns-json-schema-draft-7": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.88.0.tgz", - "integrity": "sha512-YlNb5Z6vDhVJGsSbXBhJLt5pRiV2tf0fBZH1rkBrzX0Zl0TJYD7X4bAkU/FdZsK1eDhvEYVq8VQHJRrwbqCr4g==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-json-schema-draft-7/-/apidom-ns-json-schema-draft-7-0.92.0.tgz", + "integrity": "sha512-DSY7lY98XHnc0wg0V38ZmBPs5HWuRuSb6G+n5Z+qs5RRodh1x5BrTIY6M0Yk3oJVbbEoFGmF0VlTe6vHf44pbw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", - "@swagger-api/apidom-ns-json-schema-draft-6": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", + "@swagger-api/apidom-ns-json-schema-draft-6": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1686,15 +1686,15 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.88.0.tgz", - "integrity": "sha512-QVay4Kh3Z1KV7UScJezdiIBQiBNAhTitAz2XY4U5kpyiifn0Z/KlokeMBk5mpuwWxFo83PPoB+kCTb/Joa3D7g==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-2/-/apidom-ns-openapi-2-0.92.0.tgz", + "integrity": 
"sha512-OJlSTvPzK+zqzd2xXeWkF50z08Wlpygc98eVzZjYI0Af8mz7x6R5T9BCP5p6ZlQoO9OTvk4gfv7ViWXCdamObg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1702,14 +1702,14 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-3-0": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.88.0.tgz", - "integrity": "sha512-RrAe32aDT/EZMEVz1kE3dGp5M2eSFMsciQYGBS+SAadaFe8sTgDeKw5J0rPUhcfcrpUnAXHx3EL+37u5JfPJ2w==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-0/-/apidom-ns-openapi-3-0-0.92.0.tgz", + "integrity": "sha512-VGha4RRnoeoAZBWLGy37YsBzwICM3ZFNyCk2Dwpaqfg9zFN+E6BL2CtIbkxvFkMdwaMURmDItiQsw28pF0tOgQ==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", - "@swagger-api/apidom-ns-json-schema-draft-4": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", + "@swagger-api/apidom-ns-json-schema-draft-4": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1717,14 +1717,29 @@ } }, "node_modules/@swagger-api/apidom-ns-openapi-3-1": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.88.0.tgz", - "integrity": "sha512-RJl74WxWZjiF1iz/7887Lc0hcjS9EZ+IBTzLaZNhr8VYPJG6vkpUV05YOVYUAyY22CMkP4cYtL9pfVni9pYdkA==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-openapi-3-1/-/apidom-ns-openapi-3-1-0.92.0.tgz", + "integrity": "sha512-xZD+JxifYhDoTjn76K2ZT3xNoXBQChaKfSkJr4l5Xh9Guuk0IcsPTUDRpuytuZZXVez0O401XFoUso/mZRTjkA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.88.0", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", + "@swagger-api/apidom-ast": "^0.92.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.92.0", + "@types/ramda": "~0.29.6", + "ramda": "~0.29.1", + "ramda-adjunct": "^4.1.1", + "stampit": "^4.3.2" + } + }, + "node_modules/@swagger-api/apidom-ns-workflows-1": { + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-ns-workflows-1/-/apidom-ns-workflows-1-0.92.0.tgz", + "integrity": "sha512-gl1dF+SrRHK4lLiwaK4PMjL9A5z28cW9xiMWCxRyppX/I2bVTVVOfgdAyqLWsFA0gopmITWesJxohRumG35fTw==", + "optional": true, + "dependencies": { + "@babel/runtime-corejs3": "^7.20.7", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", @@ -1732,200 +1747,228 @@ } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-json": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.88.0.tgz", - "integrity": "sha512-sq1RY9hhttG0em6lf0Dj5nGIXQjb0Q3wOGIHsFT4d3FdVoPKMtTbtrSJvaYlETzKrjZCT6HtdmTa7CNtrW/1mA==", + "version": "0.92.0", + "resolved": 
"https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-json/-/apidom-parser-adapter-api-design-systems-json-0.92.0.tgz", + "integrity": "sha512-i07FeLdNobWzHT9LnfsdOix+XrlZN/KnQL1RODPzxWk7i7ya2e4uc3JemyHh4Tnv04G8JV32SQqtzOtMteJsdA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.88.0", - "@swagger-api/apidom-parser-adapter-json": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-api-design-systems": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-api-design-systems-yaml": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.88.0.tgz", - "integrity": "sha512-jeDD+a9Dt+bcgR9AV5NCks02nL4qP6IOeRxtij+jkHkvC70swCS160tUl4D+ID1TWDPJx689weQuVDlIbwBzMg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-api-design-systems-yaml/-/apidom-parser-adapter-api-design-systems-yaml-0.92.0.tgz", + "integrity": "sha512-bbjFkU0D4zqaZnd8/m1Kyx2UuHpri8ZxLdT1TiXqHweSfRQcNt4VYt0bjWBnnGGBMkHElgYbX5ov6kHvPf3wJg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-api-design-systems": "^0.88.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-api-design-systems": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-json-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.88.0.tgz", - "integrity": "sha512-rGze1i5ItUCuaNgy397YKKniQCTAko6Bi3SLSsRXeb+hLU4z3Bwzw/ImEVUqo/uUVNUGNc0tgGcTah0pBfzb1g==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-json-2/-/apidom-parser-adapter-asyncapi-json-2-0.92.0.tgz", + "integrity": "sha512-Q7gudmGA5TUGbbr0QYNQkndktP91C0WE7uDDS2IwCBtHroRDiMPFCjzE9dsjIST5WnP+LUXmxG1Bv0NLTWcSUg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-json": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-asyncapi-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.88.0.tgz", - "integrity": "sha512-RqR+vq/P0OkyxvLh1Nvaj88TEwEaNvxCBrkC8BkiS+LM4j4XtxHEFbjisuHCi2ANqvvonNN1ccU1Y1mBJv3D1A==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-asyncapi-yaml-2/-/apidom-parser-adapter-asyncapi-yaml-2-0.92.0.tgz", + "integrity": 
"sha512-V5/VdDj0aeOKp+3AtvPSz2b0HosJfYkHPjNvPU5eafLSzqzMIR/evYq5BvKWoJL1IvLdjoEPqDVVaEZluHZTew==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-asyncapi-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-json": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.88.0.tgz", - "integrity": "sha512-JiMOxnYtr7VjyenjdMc9LH6WhgCNH065vROAakTZqZG814J/iM5HoPjdZbm7yyjl0+4OXoDNm6XPIuMxrwAeiA==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-json/-/apidom-parser-adapter-json-0.92.0.tgz", + "integrity": "sha512-KA1Nn6FN0zTA5JhRazwYN9voTDlmExID7Jwz6GXmY826OXqeT4Yl0Egyo1aLYrfT0S73vhC4LVqpdORWLGdZtg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.88.0", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ast": "^0.92.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", - "stampit": "^4.3.2", "tree-sitter": "=0.20.4", "tree-sitter-json": "=0.20.1", "web-tree-sitter": "=0.20.3" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.88.0.tgz", - "integrity": "sha512-RpzHNSpvN1ieAeyyKvK24H3vl0OiSfaPbRYTN6BRthab1dPC5vSjuP7ARwY576vldtUTQX8ltBfuRlB2G0NZXQ==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-2/-/apidom-parser-adapter-openapi-json-2-0.92.0.tgz", + "integrity": "sha512-8OlvjcvI/GuOFJJxN+Mc4tJSo9UWuJdzQtQOtO4k3QwWwS28hGvRTjQ5PpsXAVZoLJMAbDuRdREYD9qeIKvM2g==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-json": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-0": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.88.0.tgz", - "integrity": "sha512-6h01b3QUJ/QNQ8ngNl+edQma3puL2DXIa5rW0VqVOi2e4x0hKbgYdsxx+lE8IV6SVl6t/I+nWKXDjWgZR0GOdA==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-0/-/apidom-parser-adapter-openapi-json-3-0-0.92.0.tgz", + "integrity": "sha512-kzE4COaNobKIUjGsdqqXgO/LruaQHs2kTzOzHPUTR1TH1ZlB2t8MTV+6LJzGNG3IB3QSfZDd7KBEYWklsCTyTA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", - 
"@swagger-api/apidom-parser-adapter-json": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-json-3-1": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.88.0.tgz", - "integrity": "sha512-r7GKkldiHbL6YAvE6ZUVSW2Sfy70kRHV86esvOj8rQxy2tf7WRsgscjXGY5Gq4BpPWo7lMg31LpBmt5Ahqrrtw==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-json-3-1/-/apidom-parser-adapter-openapi-json-3-1-0.92.0.tgz", + "integrity": "sha512-4gkIXfKGwEKZQ6+kxp4EdFBlAc7Kjq8GAgaC7ilGTSSxIaz5hBHBOJoe3cXWpQ/WlXiOyNCy7WdbuKRpUDKIdg==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", - "@swagger-api/apidom-parser-adapter-json": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.88.0.tgz", - "integrity": "sha512-mFAzDtv9y4oqvaLsKVsJYstpH/6UzKAm2gD4ahj/w5/Gf7lG/3bpSvhzefErUbo8wXG1HcmP2v9AWKrjutyCwg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-2/-/apidom-parser-adapter-openapi-yaml-2-0.92.0.tgz", + "integrity": "sha512-TIY9cytYhA3yUf+5PcwsH9UjzKy5V4nGUtK6n5RvcL4btaGQA2LUB5CiV/1nSvYLNjYjGxhtB3haZDbHe3/gyw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.88.0.tgz", - "integrity": "sha512-1RJ5V9QklKV40N/Q8UrfqjVddynjvqi318lYusCLzFOM13cRgGZKKAztasabSEaI886wiV3rwR+EcH1eH+okQg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-0/-/apidom-parser-adapter-openapi-yaml-3-0-0.92.0.tgz", + "integrity": "sha512-AUwtAxeautYtiwifNCmv6Kjs7ksptRFxcQ3sgLv2bP3f9t5jzcI9NhmgJNdbRfohHYaHMwTuUESrfsTdBgKlAA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, 
"node_modules/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.88.0.tgz", - "integrity": "sha512-5JFGJgAAPrAPLuiGzMK9uRUqulO7pORlSslptfWhfTS68ES1X2TCODhFoH0LBCz1xTd5KhJn/7tRNlrPns0+ow==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-openapi-yaml-3-1/-/apidom-parser-adapter-openapi-yaml-3-1-0.92.0.tgz", + "integrity": "sha512-gMR4zUZ/RrjVJVr6DnqwsCsnlplGXJk6O9UKbkoBsiom81dkcHx68BmWA2oM2lYVGKx+G8WVmVDo2EJaZvZYGg==", + "optional": true, + "dependencies": { + "@babel/runtime-corejs3": "^7.20.7", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0", + "@types/ramda": "~0.29.6", + "ramda": "~0.29.1", + "ramda-adjunct": "^4.0.0" + } + }, + "node_modules/@swagger-api/apidom-parser-adapter-workflows-json-1": { + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-json-1/-/apidom-parser-adapter-workflows-json-1-0.92.0.tgz", + "integrity": "sha512-tyLiSxEKeU6mhClFjNxrTQJA2aSgfEF7LJ/ZcJgvREsvyk6ns3op9wN2SXw4UmD+657IgN0aUPihh92aEXKovA==", + "optional": true, + "dependencies": { + "@babel/runtime-corejs3": "^7.20.7", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-workflows-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", + "@types/ramda": "~0.29.6", + "ramda": "~0.29.1", + "ramda-adjunct": "^4.0.0" + } + }, + "node_modules/@swagger-api/apidom-parser-adapter-workflows-yaml-1": { + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-workflows-yaml-1/-/apidom-parser-adapter-workflows-yaml-1-0.92.0.tgz", + "integrity": "sha512-0Nr+5oAocuw3SZXcO8WEqnU7GGWP7O6GrsFafD6KLBL05v3I0erPfmnWQjWh6jBeXv8r5W69WEQItzES0DBJjA==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-ns-workflows-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.0.0" } }, "node_modules/@swagger-api/apidom-parser-adapter-yaml-1-2": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.88.0.tgz", - "integrity": "sha512-MGhcLYecAAp3hIx8pKd8QWzDjP9nLF/iM39BLAoDgt4gPduNJDMVtuHJGvFT75MOjWTuLgc6gfYK7VCZhjrmvg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-parser-adapter-yaml-1-2/-/apidom-parser-adapter-yaml-1-2-0.92.0.tgz", + "integrity": "sha512-cFLqlhehMuY5WRdU1780Vno6iWpjMlr7CfOOloZW1rKf2lvojn0c4eDsyfWFaB2DgE+Xd4CWl55McuaPZMngsw==", "optional": true, "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-ast": "^0.88.0", - "@swagger-api/apidom-core": "^0.88.0", - "@swagger-api/apidom-error": "^0.88.0", + "@swagger-api/apidom-ast": "^0.92.0", + "@swagger-api/apidom-core": "^0.92.0", + "@swagger-api/apidom-error": "^0.92.0", "@types/ramda": "~0.29.6", "ramda": "~0.29.1", "ramda-adjunct": "^4.1.1", - "stampit": "^4.3.2", "tree-sitter": "=0.20.4", "tree-sitter-yaml": "=0.5.0", "web-tree-sitter": "=0.20.3" } }, 
"node_modules/@swagger-api/apidom-reference": { - "version": "0.88.0", - "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.88.0.tgz", - "integrity": "sha512-rNM8j3JAcCWqNxnxFLm8mqqUT6usLXOU3fTrySZhqch2g1nOkZ0wPjhTV87VrovK/rtEdqLHCBwvfluyrxYXEg==", + "version": "0.92.0", + "resolved": "https://registry.npmjs.org/@swagger-api/apidom-reference/-/apidom-reference-0.92.0.tgz", + "integrity": "sha512-G/qJBTpXCdwPsc5dqPjX+vAfhvtnhIFqnKtEZ71wnEvF7TpIxdeZKKfqpg+Zxi7MSuZD/Gpkr4J/eP0lO0fAdA==", "dependencies": { "@babel/runtime-corejs3": "^7.20.7", - "@swagger-api/apidom-core": "^0.88.0", + "@swagger-api/apidom-core": "^0.92.0", "@types/ramda": "~0.29.6", "axios": "^1.4.0", "minimatch": "^7.4.3", @@ -1935,24 +1978,27 @@ "stampit": "^4.3.2" }, "optionalDependencies": { - "@swagger-api/apidom-error": "^0.88.0", - "@swagger-api/apidom-json-pointer": "^0.88.0", - "@swagger-api/apidom-ns-asyncapi-2": "^0.88.0", - "@swagger-api/apidom-ns-openapi-2": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-0": "^0.88.0", - "@swagger-api/apidom-ns-openapi-3-1": "^0.88.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.88.0", - "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.88.0", - "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-json": "^0.88.0", - "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.88.0", - "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.88.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.88.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.88.0", - "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.88.0", - "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.88.0" + "@swagger-api/apidom-error": "^0.92.0", + "@swagger-api/apidom-json-pointer": "^0.92.0", + "@swagger-api/apidom-ns-asyncapi-2": "^0.92.0", + "@swagger-api/apidom-ns-openapi-2": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-0": "^0.92.0", + "@swagger-api/apidom-ns-openapi-3-1": "^0.92.0", + "@swagger-api/apidom-ns-workflows-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-api-design-systems-json": "^0.92.0", + "@swagger-api/apidom-parser-adapter-api-design-systems-yaml": "^0.92.0", + "@swagger-api/apidom-parser-adapter-asyncapi-json-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-asyncapi-yaml-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-json": "^0.92.0", + "@swagger-api/apidom-parser-adapter-openapi-json-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-openapi-json-3-0": "^0.92.0", + "@swagger-api/apidom-parser-adapter-openapi-json-3-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-openapi-yaml-2": "^0.92.0", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-0": "^0.92.0", + "@swagger-api/apidom-parser-adapter-openapi-yaml-3-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-workflows-json-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-workflows-yaml-1": "^0.92.0", + "@swagger-api/apidom-parser-adapter-yaml-1-2": "^0.92.0" } }, "node_modules/@swagger-api/apidom-reference/node_modules/brace-expansion": { @@ -2208,15 +2254,6 @@ "@types/unist": "^2" } }, - "node_modules/@types/hoist-non-react-statics": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/@types/hoist-non-react-statics/-/hoist-non-react-statics-3.3.3.tgz", - "integrity": 
"sha512-Wny3a2UXn5FEA1l7gc6BbpoV5mD1XijZqgkp4TRgDCDL5r3B5ieOFGUX5h3n78Tr1MEG7BfvoM8qeztdvNU0fw==", - "dependencies": { - "@types/react": "*", - "hoist-non-react-statics": "^3.3.0" - } - }, "node_modules/@types/json-schema": { "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", @@ -2226,7 +2263,8 @@ "node_modules/@types/prop-types": { "version": "15.7.5", "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz", - "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" + "integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==", + "devOptional": true }, "node_modules/@types/ramda": { "version": "0.29.9", @@ -2240,6 +2278,7 @@ "version": "18.2.47", "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.47.tgz", "integrity": "sha512-xquNkkOirwyCgoClNk85BjP+aqnIS+ckAJ8i37gAbDs14jfW/J23f2GItAf33oiUPQnqNMALiFeoM9Y5mbjpVQ==", + "devOptional": true, "dependencies": { "@types/prop-types": "*", "@types/scheduler": "*", @@ -2258,7 +2297,8 @@ "node_modules/@types/scheduler": { "version": "0.16.3", "resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.3.tgz", - "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==" + "integrity": "sha512-5cJ8CB4yAx7BH1oMvdU0Jh9lrEXyPkar6F9G/ERswkCuvP4KQZfZkSjcMbAICCpQTN4OuZn8tz0HiKv9TGZgrQ==", + "devOptional": true }, "node_modules/@types/semver": { "version": "7.5.6", @@ -2376,15 +2416,15 @@ } }, "node_modules/@typescript-eslint/parser": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.18.0.tgz", - "integrity": "sha512-v6uR68SFvqhNQT41frCMCQpsP+5vySy6IdgjlzUWoo7ALCnpaWYcz/Ij2k4L8cEsL0wkvOviCMpjmtRtHNOKzA==", + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.18.1.tgz", + "integrity": "sha512-zct/MdJnVaRRNy9e84XnVtRv9Vf91/qqe+hZJtKanjojud4wAVy/7lXxJmMyX6X6J+xc6c//YEWvpeif8cAhWA==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "6.18.0", - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/typescript-estree": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0", + "@typescript-eslint/scope-manager": "6.18.1", + "@typescript-eslint/types": "6.18.1", + "@typescript-eslint/typescript-estree": "6.18.1", + "@typescript-eslint/visitor-keys": "6.18.1", "debug": "^4.3.4" }, "engines": { @@ -2403,14 +2443,33 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { + "node_modules/@typescript-eslint/scope-manager": { + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.1.tgz", + "integrity": "sha512-BgdBwXPFmZzaZUuw6wKiHKIovms97a7eTImjkXCZE04TGHysG+0hDQPmygyvgtkoB/aOQwSM/nWv3LzrOIQOBw==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.1", + "@typescript-eslint/visitor-keys": "6.18.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/type-utils": { "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.0.tgz", - "integrity": "sha512-o/UoDT2NgOJ2VfHpfr+KBY2ErWvCySNUIX/X7O9g8Zzt/tXdpfEU43qbNk8LVuWUT2E0ptzTWXh79i74PP0twA==", + 
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.0.tgz", + "integrity": "sha512-ZeMtrXnGmTcHciJN1+u2CigWEEXgy1ufoxtWcHORt5kGvpjjIlK9MUhzHm4RM8iVy6dqSaZA/6PVkX6+r+ChjQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0" + "@typescript-eslint/typescript-estree": "6.18.0", + "@typescript-eslint/utils": "6.18.0", + "debug": "^4.3.4", + "ts-api-utils": "^1.0.1" }, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2418,9 +2477,17 @@ "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { "version": "6.18.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", @@ -2433,7 +2500,7 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { "version": "6.18.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", @@ -2461,7 +2528,7 @@ } } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { + "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { "version": "6.18.0", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", @@ -2478,7 +2545,7 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/brace-expansion": { + "node_modules/@typescript-eslint/type-utils/node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", @@ -2487,7 +2554,7 @@ "balanced-match": "^1.0.0" } }, - "node_modules/@typescript-eslint/parser/node_modules/minimatch": { + "node_modules/@typescript-eslint/type-utils/node_modules/minimatch": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", @@ -2502,37 +2569,10 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@typescript-eslint/type-utils": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.0.tgz", - "integrity": "sha512-ZeMtrXnGmTcHciJN1+u2CigWEEXgy1ufoxtWcHORt5kGvpjjIlK9MUhzHm4RM8iVy6dqSaZA/6PVkX6+r+ChjQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/typescript-estree": "6.18.0", - "@typescript-eslint/utils": "6.18.0", - "debug": "^4.3.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 
|| >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^7.0.0 || ^8.0.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", - "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", + "node_modules/@typescript-eslint/types": { + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.1.tgz", + "integrity": "sha512-4TuMAe+tc5oA7wwfqMtB0Y5OrREPF1GeJBAjqwgZh1lEMH5PJQgWgHGfYufVB51LtjD+peZylmeyxUXPfENLCw==", "dev": true, "engines": { "node": "^16.0.0 || >=18.0.0" @@ -2542,14 +2582,14 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", - "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", + "node_modules/@typescript-eslint/typescript-estree": { + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.1.tgz", + "integrity": "sha512-fv9B94UAhywPRhUeeV/v+3SBDvcPiLxRZJw/xZeeGgRLQZ6rLMG+8krrJUyIf6s1ecWTzlsbp0rlw7n9sjufHA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0", + "@typescript-eslint/types": "6.18.1", + "@typescript-eslint/visitor-keys": "6.18.1", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -2570,24 +2610,7 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", - "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/brace-expansion": { + "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", @@ -2596,7 +2619,7 @@ "balanced-match": "^1.0.0" } }, - "node_modules/@typescript-eslint/type-utils/node_modules/minimatch": { + "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", @@ -2735,6 +2758,23 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "6.18.1", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.1.tgz", + "integrity": "sha512-/kvt0C5lRqGoCfsbmm7/CwMqoSkY3zzHLIjdhHZQW3VFrnz7ATecOHR7nb7V+xn4286MBxfnQfQhAmCI0u+bJA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "6.18.1", + "eslint-visitor-keys": "^3.4.1" + }, + "engines": { + "node": "^16.0.0 || >=18.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, "node_modules/@ungap/structured-clone": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", @@ -2865,11 +2905,11 @@ } }, "node_modules/axios": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", - "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", + "version": "1.6.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.5.tgz", + "integrity": "sha512-Ii012v05KEVuUoFWmMW/UQv9aRIc3ZwkWDcM+h5Il8izZCtRVpDUfwpoFf7eOtajT3QiGR4yDUx7lPqHJULgbg==", "dependencies": { - "follow-redirects": "^1.15.0", + "follow-redirects": "^1.15.4", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -3038,9 +3078,9 @@ } }, "node_modules/classnames": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz", - "integrity": "sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw==" + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz", + "integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==" }, "node_modules/client-only": { "version": "0.0.1", @@ -3140,9 +3180,9 @@ } }, "node_modules/core-js-pure": { - "version": "3.34.0", - "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.34.0.tgz", - "integrity": "sha512-pmhivkYXkymswFfbXsANmBAewXx86UBfmagP+w0wkK06kLsLlTK5oQmsURPivzMkIBQiYq2cjamcZExIwlFQIg==", + "version": "3.35.0", + "resolved": "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.35.0.tgz", + "integrity": "sha512-f+eRYmkou59uh7BPcyJ8MC76DiGhspj1KMxVIcF24tzP8NA9HVa1uC7BTW2tgx7E1QVCzDzsgp7kArrzhlz8Ew==", "hasInstallScript": true, "funding": { "type": "opencollective", @@ -3170,7 +3210,8 @@ "node_modules/csstype": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz", - "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==" + "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==", + "devOptional": true }, "node_modules/debug": { "version": "4.3.4", @@ -3693,9 +3734,9 @@ "dev": true }, "node_modules/follow-redirects": { - "version": "1.15.4", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.4.tgz", - "integrity": "sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==", + "version": "1.15.5", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.5.tgz", + "integrity": "sha512-vSFWUON1B+yAw1VN4xMfxgn5fTUiaOzAJCKBwIIgT/+7CuGy9+r+5gITvP62j3RmaD5Ph65UaERdOSRGUzZtgw==", "funding": [ { "type": "individual", @@ -4052,14 +4093,6 @@ "node": "*" } }, - "node_modules/hoist-non-react-statics": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-3.3.2.tgz", - "integrity": 
"sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==", - "dependencies": { - "react-is": "^16.7.0" - } - }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", @@ -4609,9 +4642,9 @@ "dev": true }, "node_modules/node-abi": { - "version": "3.52.0", - "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.52.0.tgz", - "integrity": "sha512-JJ98b02z16ILv7859irtXn4oUaFWADtvkzy2c0IAatNVX2Mc9Yoh8z6hZInn3QwvMEYhHuQloYi+TTQy67SIdQ==", + "version": "3.54.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.54.0.tgz", + "integrity": "sha512-p7eGEiQil0YUV3ItH4/tBb781L5impVmmx2E9FRKF7d18XXzp4PGT2tdYMFY6wQqgxD0IwNZOiSJ0/K0fSi/OA==", "optional": true, "dependencies": { "semver": "^7.3.5" @@ -5211,35 +5244,23 @@ "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==" }, "node_modules/react-redux": { - "version": "8.1.3", - "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-8.1.3.tgz", - "integrity": "sha512-n0ZrutD7DaX/j9VscF+uTALI3oUPa/pO4Z3soOBIjuRn/FzVu6aehhysxZCLi6y7duMf52WNZGMl7CtuK5EnRw==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.1.0.tgz", + "integrity": "sha512-6qoDzIO+gbrza8h3hjMA9aq4nwVFCKFtY2iLxCtVT38Swyy2C/dJCGBXHeHLtx6qlg/8qzc2MrhOeduf5K32wQ==", "dependencies": { - "@babel/runtime": "^7.12.1", - "@types/hoist-non-react-statics": "^3.3.1", "@types/use-sync-external-store": "^0.0.3", - "hoist-non-react-statics": "^3.3.2", - "react-is": "^18.0.0", "use-sync-external-store": "^1.0.0" }, "peerDependencies": { - "@types/react": "^16.8 || ^17.0 || ^18.0", - "@types/react-dom": "^16.8 || ^17.0 || ^18.0", - "react": "^16.8 || ^17.0 || ^18.0", - "react-dom": "^16.8 || ^17.0 || ^18.0", - "react-native": ">=0.59", - "redux": "^4 || ^5.0.0-beta.0" + "@types/react": "^18.2.25", + "react": "^18.0", + "react-native": ">=0.69", + "redux": "^5.0.0" }, "peerDependenciesMeta": { "@types/react": { "optional": true }, - "@types/react-dom": { - "optional": true - }, - "react-dom": { - "optional": true - }, "react-native": { "optional": true }, @@ -5248,11 +5269,6 @@ } } }, - "node_modules/react-redux/node_modules/react-is": { - "version": "18.2.0", - "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz", - "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==" - }, "node_modules/react-remove-scroll": { "version": "2.5.5", "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.5.5.tgz", @@ -5350,12 +5366,9 @@ } }, "node_modules/redux": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/redux/-/redux-4.2.1.tgz", - "integrity": "sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w==", - "dependencies": { - "@babel/runtime": "^7.9.2" - } + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/redux/-/redux-5.0.1.tgz", + "integrity": "sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==" }, "node_modules/redux-immutable": { "version": "4.0.0", @@ -5429,9 +5442,9 @@ "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" }, "node_modules/reselect": { - "version": "4.1.8", - "resolved": "https://registry.npmjs.org/reselect/-/reselect-4.1.8.tgz", - "integrity": 
"sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ==" + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/reselect/-/reselect-5.1.0.tgz", + "integrity": "sha512-aw7jcGLDpSgNDyWBQLv2cedml85qd95/iszJjN988zX1t7AVRJi19d9kto5+W7oCfQ94gyo40dVbT6g2k4/kXg==" }, "node_modules/resolve-from": { "version": "4.0.0", @@ -5582,14 +5595,15 @@ } }, "node_modules/set-function-length": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz", - "integrity": "sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.0.tgz", + "integrity": "sha512-4DBHDoyHlM1IRPGYcoxexgh67y4ueR53FKV1yyxwFMY7aCqcN/38M1+SwZ/qJQ8iLv7+ck385ot4CcisOAPT9w==", "dependencies": { "define-data-property": "^1.1.1", - "get-intrinsic": "^1.2.1", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.2", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.1" }, "engines": { "node": ">= 0.4" @@ -5807,16 +5821,16 @@ } }, "node_modules/swagger-client": { - "version": "3.24.6", - "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.24.6.tgz", - "integrity": "sha512-vgolnwLjsLCLe3mA9yOuXqmslVzxRpjz0fTBWwPtDGvYSU8FMVra0FGevw+N2OQ80UE1rOqgv4Te0AfvzMyR8g==", + "version": "3.25.0", + "resolved": "https://registry.npmjs.org/swagger-client/-/swagger-client-3.25.0.tgz", + "integrity": "sha512-p143zWkIhgyh2E5+3HPFMlCw3WkV9RbX9HyftfBdiccCbOlmHdcJC0XEJZxcm+ZA+80DORs0F30/mzk7sx4iwA==", "dependencies": { "@babel/runtime-corejs3": "^7.22.15", - "@swagger-api/apidom-core": ">=0.83.0 <1.0.0", - "@swagger-api/apidom-error": ">=0.83.0 <1.0.0", - "@swagger-api/apidom-json-pointer": ">=0.83.0 <1.0.0", - "@swagger-api/apidom-ns-openapi-3-1": ">=0.83.0 <1.0.0", - "@swagger-api/apidom-reference": ">=0.83.0 <1.0.0", + "@swagger-api/apidom-core": ">=0.90.0 <1.0.0", + "@swagger-api/apidom-error": ">=0.90.0 <1.0.0", + "@swagger-api/apidom-json-pointer": ">=0.90.0 <1.0.0", + "@swagger-api/apidom-ns-openapi-3-1": ">=0.90.0 <1.0.0", + "@swagger-api/apidom-reference": ">=0.90.0 <1.0.0", "cookie": "~0.6.0", "deepmerge": "~4.3.0", "fast-json-patch": "^3.0.0-1", @@ -5838,14 +5852,14 @@ } }, "node_modules/swagger-ui-react": { - "version": "5.10.5", - "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.10.5.tgz", - "integrity": "sha512-uBQLku4j3L1NC4/xE3HTgz1EcFisBphh8AnGqbj9LMxeGGcpKOlx/ZDigRAeVXWr9jOnZZbeGBzMe4NVHxPZrQ==", + "version": "5.11.0", + "resolved": "https://registry.npmjs.org/swagger-ui-react/-/swagger-ui-react-5.11.0.tgz", + "integrity": "sha512-iqc5/Z8nvqOdjU2LuWYbREnDmKj5gndZSESTH9dXfymlzLc2NoPQmXZAw02U8kFgHyciX0yDMp3oaCw1zBdPSA==", "dependencies": { - "@babel/runtime-corejs3": "^7.23.5", - "@braintree/sanitize-url": "=6.0.4", + "@babel/runtime-corejs3": "^7.23.7", + "@braintree/sanitize-url": "=7.0.0", "base64-js": "^1.5.1", - "classnames": "^2.3.1", + "classnames": "^2.5.1", "css.escape": "1.5.1", "deep-extend": "0.6.0", "dompurify": "=3.0.6", @@ -5863,23 +5877,23 @@ "react-immutable-proptypes": "2.2.0", "react-immutable-pure-component": "^2.2.0", "react-inspector": "^6.0.1", - "react-redux": "^8.1.3", + "react-redux": "^9.0.4", "react-syntax-highlighter": "^15.5.0", - "redux": "^4.1.2", + "redux": "^5.0.0", "redux-immutable": "^4.0.0", "remarkable": "^2.0.1", - "reselect": "^4.1.8", + "reselect": "^5.0.1", 
"serialize-error": "^8.1.0", "sha.js": "^2.4.11", - "swagger-client": "^3.24.6", + "swagger-client": "^3.25.0", "url-parse": "^1.5.10", "xml": "=1.0.1", "xml-but-prettier": "^1.0.1", "zenscroll": "^4.0.2" }, "peerDependencies": { - "react": ">=17.0.0", - "react-dom": ">=17.0.0" + "react": ">=16.8.0 <19", + "react-dom": ">=16.8.0 <19" } }, "node_modules/tar-fs": { @@ -5944,9 +5958,12 @@ "integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ==" }, "node_modules/traverse": { - "version": "0.6.7", - "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.7.tgz", - "integrity": "sha512-/y956gpUo9ZNCb99YjxG7OaslxZWHfCHAUUfshwqOXmxUIvqLjVO581BT+gM59+QV9tFe6/CGG53tsA1Y7RSdg==", + "version": "0.6.8", + "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.8.tgz", + "integrity": "sha512-aXJDbk6SnumuaZSANd21XAo15ucCDE38H4fkqiGsc3MhCK+wOlZvLP9cB/TvpHT0mOyWgC4Z8EwRlzqYSUzdsA==", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -6229,9 +6246,9 @@ "peer": true }, "node_modules/web-streams-polyfill": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.2.1.tgz", - "integrity": "sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", + "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", "engines": { "node": ">= 8" } diff --git a/playground/package.json b/playground/package.json index c2d42c05d6..4f630e304a 100644 --- a/playground/package.json +++ b/playground/package.json @@ -14,13 +14,13 @@ "graphql": "^16.8.1", "react": "^18.2.0", "react-dom": "^18.2.0", - "swagger-ui-react": "^5.10.5" + "swagger-ui-react": "^5.11.0" }, "devDependencies": { "@types/react": "^18.2.47", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/parser": "^6.18.0", + "@typescript-eslint/parser": "^6.18.1", "@typescript-eslint/eslint-plugin": "^6.18.0", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", From b74f3096488deb08683dc6b0f30ba2585a303acf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 02:33:10 -0800 Subject: [PATCH 54/60] bot: Bump @typescript-eslint/eslint-plugin from 6.18.0 to 6.18.1 in /playground (#2215) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [@typescript-eslint/eslint-plugin](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/eslint-plugin) from 6.18.0 to 6.18.1.
Release notes

Sourced from @typescript-eslint/eslint-plugin's releases.

v6.18.1

6.18.1 (2024-01-08)

🩹 Fixes

  • eslint-plugin: [no-non-null-assertion] provide a valid fix when the member access is on the next line (#8185; see the sketch after this list)
  • eslint-plugin: [no-unnecessary-condition] improve checking of optional callees (#8178)
  • eslint-plugin: [prefer-readonly] support modifiers of unions and intersections (#8169)
  • eslint-plugin: [switch-exhaustiveness-check] fix the new allowDefaultCaseForExhaustiveSwitch option (#8176)
  • typescript-estree: fix an invalid parsing error when using an update expression on a non-null assertion (#8202)
  • typescript-estree: only create a project service from the env setting if `project` is enabled (#8136)
  • utils: improve the error message on a typed rule with an invalid parser (#8146)
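
To make the first fix above concrete, here is a minimal TypeScript sketch (hypothetical code, not taken from the typescript-eslint patch; the `Config`/`config` names are illustrative) of the situation #8185 addresses: when the member access sits on the line after the `!`, the rule's suggested rewrite has to use optional chaining for the result to stay type-correct.

```ts
// Hypothetical example of the no-non-null-assertion case fixed in 6.18.1;
// none of these names come from the upstream patch.
interface Config {
  runtime?: { name: string };
}

declare const config: Config;

// Flagged by the rule: a non-null assertion whose member access
// continues on the next line.
const asserted = config.runtime!
  .name;

// Rough shape of the corrected suggestion: the assertion is replaced
// with optional chaining, which stays valid across the line break.
const suggested = config.runtime
  ?.name;
```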

❤️ Thank You

You can read about our versioning strategy and releases on our website.

Changelog

Sourced from @typescript-eslint/eslint-plugin's changelog.

6.18.1 (2024-01-08)

🩹 Fixes

  • eslint-plugin: [no-non-null-assertion] provide a valid fix when the member access is on the next line
  • eslint-plugin: [no-unnecessary-condition] improve checking of optional callees
  • eslint-plugin: [prefer-readonly] support modifiers of unions and intersections
  • eslint-plugin: [switch-exhaustiveness-check] fix the new allowDefaultCaseForExhaustiveSwitch option

❤️ Thank You

  • auvred
  • James
  • Josh Goldberg ✨
  • YeonJuan

You can read about our versioning strategy and releases on our website.

Commits
  • aa7ab0e chore(release): publish 6.18.1
  • 78db0ad chore: fix test formatting in prefer-readonly.test.ts (#8223)
  • 5205192 fix(eslint-plugin): [switch-exhaustiveness-check] fix new allowDefaultCaseFor...
  • 3a219c0 fix(eslint-plugin): [prefer-readonly] support modifiers of unions and interse...
  • 10c0530 fix(eslint-plugin): [no-unnecessary-condition] improve checking optional call...
  • 27d6ac1 fix(eslint-plugin): [no-non-null-assertion] provide valid fix when member acc...
  • 1ee3087 chore: enable eslint-plugin-jsdoc internally (#8145)
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@typescript-eslint/eslint-plugin&package-manager=npm_and_yarn&previous-version=6.18.0&new-version=6.18.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 266 +++-------------------------------- playground/package.json | 2 +- 2 files changed, 20 insertions(+), 248 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index 323e6f16d0..fddbab1778 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -18,7 +18,7 @@ "@types/react": "^18.2.47", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", - "@typescript-eslint/eslint-plugin": "^6.18.0", + "@typescript-eslint/eslint-plugin": "^6.18.1", "@typescript-eslint/parser": "^6.18.1", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", @@ -2334,16 +2334,16 @@ "integrity": "sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA==" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.18.0.tgz", - "integrity": "sha512-3lqEvQUdCozi6d1mddWqd+kf8KxmGq2Plzx36BlkjuQe3rSTm/O98cLf0A4uDO+a5N1KD2SeEEl6fW97YHY+6w==", + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-6.18.1.tgz", + "integrity": "sha512-nISDRYnnIpk7VCFrGcu1rnZfM1Dh9LRHnfgdkjcbi/l7g16VYRri3TjXi9Ir4lOZSw5N/gnV/3H7jIPQ8Q4daA==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.5.1", - "@typescript-eslint/scope-manager": "6.18.0", - "@typescript-eslint/type-utils": "6.18.0", - "@typescript-eslint/utils": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0", + "@typescript-eslint/scope-manager": "6.18.1", + "@typescript-eslint/type-utils": "6.18.1", + "@typescript-eslint/utils": "6.18.1", + "@typescript-eslint/visitor-keys": "6.18.1", "debug": "^4.3.4", "graphemer": "^1.4.0", "ignore": "^5.2.4", @@ -2368,53 +2368,6 @@ } } }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.0.tgz", - "integrity": "sha512-o/UoDT2NgOJ2VfHpfr+KBY2ErWvCySNUIX/X7O9g8Zzt/tXdpfEU43qbNk8LVuWUT2E0ptzTWXh79i74PP0twA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", - "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", - "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": 
"^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/parser": { "version": "6.18.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.18.1.tgz", @@ -2461,13 +2414,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.0.tgz", - "integrity": "sha512-ZeMtrXnGmTcHciJN1+u2CigWEEXgy1ufoxtWcHORt5kGvpjjIlK9MUhzHm4RM8iVy6dqSaZA/6PVkX6+r+ChjQ==", + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-6.18.1.tgz", + "integrity": "sha512-wyOSKhuzHeU/5pcRDP2G2Ndci+4g653V43gXTpt4nbyoIOAASkGDA9JIAgbQCdCkcr1MvpSYWzxTz0olCn8+/Q==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "6.18.0", - "@typescript-eslint/utils": "6.18.0", + "@typescript-eslint/typescript-estree": "6.18.1", + "@typescript-eslint/utils": "6.18.1", "debug": "^4.3.4", "ts-api-utils": "^1.0.1" }, @@ -2487,88 +2440,6 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", - "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", - "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "minimatch": "9.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", - "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/minimatch": { - "version": "9.0.3", - 
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@typescript-eslint/types": { "version": "6.18.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.1.tgz", @@ -2635,17 +2506,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.18.0.tgz", - "integrity": "sha512-wiKKCbUeDPGaYEYQh1S580dGxJ/V9HI7K5sbGAVklyf+o5g3O+adnS4UNJajplF4e7z2q0uVBaTdT/yLb4XAVA==", + "version": "6.18.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-6.18.1.tgz", + "integrity": "sha512-zZmTuVZvD1wpoceHvoQpOiewmWu3uP9FuTWo8vqpy2ffsmfCE8mklRPi+vmnIYAIk9t/4kOThri2QCDgor+OpQ==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@types/json-schema": "^7.0.12", "@types/semver": "^7.5.0", - "@typescript-eslint/scope-manager": "6.18.0", - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/typescript-estree": "6.18.0", + "@typescript-eslint/scope-manager": "6.18.1", + "@typescript-eslint/types": "6.18.1", + "@typescript-eslint/typescript-estree": "6.18.1", "semver": "^7.5.4" }, "engines": { @@ -2659,105 +2530,6 @@ "eslint": "^7.0.0 || ^8.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-6.18.0.tgz", - "integrity": "sha512-o/UoDT2NgOJ2VfHpfr+KBY2ErWvCySNUIX/X7O9g8Zzt/tXdpfEU43qbNk8LVuWUT2E0ptzTWXh79i74PP0twA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-6.18.0.tgz", - "integrity": "sha512-/RFVIccwkwSdW/1zeMx3hADShWbgBxBnV/qSrex6607isYjj05t36P6LyONgqdUrNLl5TYU8NIKdHUYpFvExkA==", - "dev": true, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-6.18.0.tgz", - "integrity": "sha512-klNvl+Ql4NsBNGB4W9TZ2Od03lm7aGvTbs0wYaFYsplVPhr+oeXjlPZCDI4U9jgJIDK38W1FKhacCFzCC+nbIg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "@typescript-eslint/visitor-keys": "6.18.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "minimatch": "9.0.3", - "semver": "^7.5.4", - "ts-api-utils": "^1.0.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - 
"node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "6.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.0.tgz", - "integrity": "sha512-1wetAlSZpewRDb2h9p/Q8kRjdGuqdTAQbkJIOUMLug2LBLG+QOjiWoSj6/3B/hA9/tVTFFdtiKvAYoYnSRW/RA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "6.18.0", - "eslint-visitor-keys": "^3.4.1" - }, - "engines": { - "node": "^16.0.0 || >=18.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@typescript-eslint/visitor-keys": { "version": "6.18.1", "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-6.18.1.tgz", diff --git a/playground/package.json b/playground/package.json index 4f630e304a..d519537174 100644 --- a/playground/package.json +++ b/playground/package.json @@ -21,7 +21,7 @@ "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/parser": "^6.18.1", - "@typescript-eslint/eslint-plugin": "^6.18.0", + "@typescript-eslint/eslint-plugin": "^6.18.1", "@vitejs/plugin-react-swc": "^3.5.0", "eslint": "^8.56.0", "eslint-plugin-react-hooks": "^4.6.0", From ad5e98af9562d0fa3300f8244a9de1990b45848a Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Mon, 15 Jan 2024 17:03:28 -0500 Subject: [PATCH 55/60] fix(i): Prevent double-sided relations in views (#2222) ## Relevant issue(s) Resolves #2221 ## Description Prevents users from defining double-sided relations in views. I'm pretty sure this was discussed during the development of views, and accepted as a means of simplifying initial development (lots of stuff gets really complicated if we allow this). Is just that I only permitted one sided relations, I never blocked off double sided relations. 
--- request/graphql/schema/collection.go | 7 ++- request/graphql/schema/errors.go | 18 ++++++-- .../view/one_to_many/simple_test.go | 43 +++++++++++++++++++ 3 files changed, 63 insertions(+), 5 deletions(-) diff --git a/request/graphql/schema/collection.go b/request/graphql/schema/collection.go index f386c752ed..36f4d61c71 100644 --- a/request/graphql/schema/collection.go +++ b/request/graphql/schema/collection.go @@ -536,7 +536,12 @@ func finalizeRelations(relationManager *RelationManager, definitions []client.Co // if not finalized then we are missing one side of the relationship // unless this is an embedded object, which only have single-sided relations - if _, ok := embeddedObjNames[field.Schema]; !ok && !rel.finalized { + _, shouldBeOneSidedRelation := embeddedObjNames[field.Schema] + if shouldBeOneSidedRelation && rel.finalized { + return NewErrViewRelationMustBeOneSided(field.Name, field.Schema) + } + + if !shouldBeOneSidedRelation && !rel.finalized { return client.NewErrRelationOneSided(field.Name, field.Schema) } diff --git a/request/graphql/schema/errors.go b/request/graphql/schema/errors.go index 39bbbd803a..e832e687ee 100644 --- a/request/graphql/schema/errors.go +++ b/request/graphql/schema/errors.go @@ -27,6 +27,7 @@ const ( errIndexUnknownArgument string = "index with unknown argument" errIndexInvalidArgument string = "index with invalid argument" errIndexInvalidName string = "index with invalid name" + errViewRelationMustBeOneSided string = "relations in views must only be defined on one schema" ) var ( @@ -46,10 +47,11 @@ var ( ErrMultipleRelationPrimaries = errors.New("relation can only have a single field set as primary") // NonNull is the literal name of the GQL type, so we have to disable the linter //nolint:revive - ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") - ErrIndexMissingFields = errors.New(errIndexMissingFields) - ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) - ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) + ErrNonNullNotSupported = errors.New("NonNull fields are not currently supported") + ErrIndexMissingFields = errors.New(errIndexMissingFields) + ErrIndexWithUnknownArg = errors.New(errIndexUnknownArgument) + ErrIndexWithInvalidArg = errors.New(errIndexInvalidArgument) + ErrViewRelationMustBeOneSided = errors.New(errViewRelationMustBeOneSided) ) func NewErrDuplicateField(objectName, fieldName string) error { @@ -130,3 +132,11 @@ func NewErrRelationNotFound(relationName string) error { errors.NewKV("RelationName", relationName), ) } + +func NewErrViewRelationMustBeOneSided(fieldName string, typeName string) error { + return errors.New( + errViewRelationMustBeOneSided, + errors.NewKV("Field", fieldName), + errors.NewKV("Type", typeName), + ) +} diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go index 1eb4dfb7f2..e5306e009d 100644 --- a/tests/integration/view/one_to_many/simple_test.go +++ b/tests/integration/view/one_to_many/simple_test.go @@ -363,3 +363,46 @@ func TestView_OneToManyMultipleViewsWithEmbeddedSchema(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_OneToManyWithDoubleSidedRelation_Errors(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + 
name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + author: AuthorView + } + `, + ExpectedError: "relations in views must only be defined on one schema", + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From fee8271bddd05c01454a45fc9e91a91c5884864d Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Mon, 15 Jan 2024 17:38:23 -0500 Subject: [PATCH 56/60] test(i): Add view of view tests (#2220) ## Relevant issue(s) Resolves #2218 ## Description This works, but was previously untested. --- .../view/one_to_many/simple_test.go | 94 +++++++++++++++++++ tests/integration/view/simple/simple_test.go | 60 ++++++++++++ 2 files changed, 154 insertions(+) diff --git a/tests/integration/view/one_to_many/simple_test.go b/tests/integration/view/one_to_many/simple_test.go index e5306e009d..f6ccd699b8 100644 --- a/tests/integration/view/one_to_many/simple_test.go +++ b/tests/integration/view/one_to_many/simple_test.go @@ -367,6 +367,100 @@ func TestView_OneToManyMultipleViewsWithEmbeddedSchema(t *testing.T) { func TestView_OneToManyWithDoubleSidedRelation_Errors(t *testing.T) { test := testUtils.TestCase{ Description: "One to many view", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type Author { + name: String + books: [Book] + } + type Book { + name: String + author: Author + } + `, + }, + testUtils.CreateView{ + Query: ` + Author { + name + books { + name + } + } + `, + SDL: ` + type AuthorView { + name: String + books: [BookView] + } + interface BookView { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + AuthorView { + name + books { + name + } + } + `, + SDL: ` + type AuthorViewView { + name: String + books: [BookViewView] + } + interface BookViewView { + name: String + } + `, + }, + // bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d + testUtils.CreateDoc{ + CollectionID: 0, + Doc: `{ + "name": "Harper Lee" + }`, + }, + testUtils.CreateDoc{ + CollectionID: 1, + Doc: `{ + "name": "To Kill a Mockingbird", + "author_id": "bae-ef9cd756-08e1-5f23-abeb-7b3e6351a68d" + }`, + }, + testUtils.Request{ + Request: `query { + AuthorViewView { + name + books { + name + } + } + }`, + Results: []map[string]any{ + { + "name": "Harper Lee", + "books": []map[string]any{ + { + "name": "To Kill a Mockingbird", + }, + }, + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} + +func TestView_OneToManyViewOfView(t *testing.T) { + test := testUtils.TestCase{ + Description: "One to many view of view", Actions: []any{ testUtils.SchemaUpdate{ Schema: ` diff --git a/tests/integration/view/simple/simple_test.go b/tests/integration/view/simple/simple_test.go index 802e281391..5dd74da8ed 100644 --- a/tests/integration/view/simple/simple_test.go +++ b/tests/integration/view/simple/simple_test.go @@ -260,3 +260,63 @@ func TestView_SimpleWithExtraFieldInViewQuery(t *testing.T) { testUtils.ExecuteTestCase(t, test) } + +func TestView_SimpleViewOfView(t *testing.T) { + test := testUtils.TestCase{ + Description: "Simple view of view", + Actions: []any{ + testUtils.SchemaUpdate{ + Schema: ` + type User { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + User { + name + } + `, + SDL: ` + type UserView { + name: String + } + `, + }, + testUtils.CreateView{ + Query: ` + UserView { + name + } + `, + SDL: ` + type UserViewView { + name: String + } + `, + }, + testUtils.CreateDoc{ + Doc: `{ + "name": "John" + }`, + }, + testUtils.Request{ + Request: ` + query { + UserViewView { + name + } + } + `, + Results: 
[]map[string]any{ + { + "name": "John", + }, + }, + }, + }, + } + + testUtils.ExecuteTestCase(t, test) +} From 63549e78a759897f4e1936016d4e3c35e853783b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 16:08:23 -0800 Subject: [PATCH 57/60] bot: Bump @types/react from 18.2.47 to 18.2.48 in /playground (#2213) Bumps [@types/react](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/HEAD/types/react) from 18.2.47 to 18.2.48.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=@types/react&package-manager=npm_and_yarn&previous-version=18.2.47&new-version=18.2.48)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- playground/package-lock.json | 8 ++++---- playground/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/playground/package-lock.json b/playground/package-lock.json index fddbab1778..a38ae85877 100644 --- a/playground/package-lock.json +++ b/playground/package-lock.json @@ -15,7 +15,7 @@ "swagger-ui-react": "^5.11.0" }, "devDependencies": { - "@types/react": "^18.2.47", + "@types/react": "^18.2.48", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/eslint-plugin": "^6.18.1", @@ -2275,9 +2275,9 @@ } }, "node_modules/@types/react": { - "version": "18.2.47", - "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.47.tgz", - "integrity": "sha512-xquNkkOirwyCgoClNk85BjP+aqnIS+ckAJ8i37gAbDs14jfW/J23f2GItAf33oiUPQnqNMALiFeoM9Y5mbjpVQ==", + "version": "18.2.48", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.2.48.tgz", + "integrity": "sha512-qboRCl6Ie70DQQG9hhNREz81jqC1cs9EVNcjQ1AU+jH6NFfSAhVVbrrY/+nSF+Bsk4AOwm9Qa61InvMCyV+H3w==", "devOptional": true, "dependencies": { "@types/prop-types": "*", diff --git a/playground/package.json b/playground/package.json index d519537174..2532afe5af 100644 --- a/playground/package.json +++ b/playground/package.json @@ -17,7 +17,7 @@ "swagger-ui-react": "^5.11.0" }, "devDependencies": { - "@types/react": "^18.2.47", + "@types/react": "^18.2.48", "@types/react-dom": "^18.2.18", "@types/swagger-ui-react": "^4.18.3", "@typescript-eslint/parser": "^6.18.1", From d1142a51b68658fc946949c88feb992646edae78 Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Tue, 16 Jan 2024 15:19:43 -0500 Subject: [PATCH 58/60] fix(i): Remove deprecated tcpaddr flag from readme (#2225) ## Relevant issue(s) Resolves #2224 ## Description This PR removes mention of the deprecated `tcpaddr` flag from the readme. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ed993387f3..0e5997b902 100644 --- a/README.md +++ b/README.md @@ -334,7 +334,7 @@ defradb client schema add ' Start (or continue running from above) *nodeB*, that will be receiving updates: ```shell -defradb start --rootdir ~/.defradb-nodeB --url localhost:9182 --p2paddr /ip4/0.0.0.0/tcp/9172 --tcpaddr /ip4/0.0.0.0/tcp/9162 +defradb start --rootdir ~/.defradb-nodeB --url localhost:9182 --p2paddr /ip4/0.0.0.0/tcp/9172 ``` Here we *do not* specify `--peers` as we will manually define a replicator after startup via the `rpc` client command. From 58dc727a5e12ebecdd0adadec5188bd4b2a6697a Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Thu, 18 Jan 2024 12:55:31 -0500 Subject: [PATCH 59/60] chore(i): Split build by OS for release workflow (#2231) ## Relevant issue(s) Resolves #2227 ## Description This PR modifies the release workflow to split the build across the 3 different OSes. This is needed now that we use wasmtime, which requires CGO for compilation.
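For context, a minimal sketch of the resulting two-phase flow (the command flags are taken from the workflow diff below and rely on GoReleaser Pro's split/merge feature; treat this as an outline rather than a definitive runbook):

```shell
# prepare job: runs once per OS in the matrix (ubuntu-latest, macos-latest,
# windows-latest); each runner compiles its own target and writes a partial
# build under dist/<target>_amd64
goreleaser release --clean --split

# release job: a single ubuntu-latest runner restores the cached dist/
# directories from the prepare jobs, then merges the partials and publishes
goreleaser continue --merge
```

The partial `dist/` directories are handed between jobs via `actions/cache`, keyed on the short commit SHA (`sha_short`), with `enableCrossOsArchive` set on the Windows entries so that cache can be restored on the Linux release runner.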
--- .github/workflows/release.yml | 125 +++++++++++++++++++++++++++++++--- .goreleaser.yaml | 10 +-- 2 files changed, 118 insertions(+), 17 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7d226a19fc..bfcdf9666f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -23,20 +23,17 @@ permissions: issues: write jobs: - goreleaser: - runs-on: ubuntu-latest + prepare: + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + runs-on: ${{ matrix.os }} steps: - name: Checkout code into the directory uses: actions/checkout@v3 with: fetch-depth: 0 - - name: Setup Go environment explicitly - uses: actions/setup-go@v3 - with: - go-version: "1.20" - check-latest: true - - name: Apply tag run: git tag ${{ github.event.inputs.tag }} @@ -45,27 +42,135 @@ jobs: - name: Set up QEMU uses: docker/setup-qemu-action@v2 + if: matrix.os == 'ubuntu-latest' - name: Log in to Docker Hub uses: docker/login-action@v2 + if: matrix.os == 'ubuntu-latest' with: username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} - name: Log in to the Container registry uses: docker/login-action@v2 + if: matrix.os == 'ubuntu-latest' with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup Go environment explicitly + uses: actions/setup-go@v3 + with: + go-version: "1.20" + check-latest: true + cache: true + + - shell: bash + run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + - uses: actions/cache@v4 + if: matrix.os == 'ubuntu-latest' + with: + path: dist/linux_amd64 + key: linux-${{ env.sha_short }} + - uses: actions/cache@v4 + if: matrix.os == 'macos-latest' + with: + path: dist/darwin_amd64 + key: darwin-${{ env.sha_short }} + - uses: actions/cache@v4 + if: matrix.os == 'windows-latest' + with: + path: dist/windows_amd64 + key: windows-${{ env.sha_short }} + enableCrossOsArchive: true - name: Run GoReleaser uses: goreleaser/goreleaser-action@v5 with: distribution: goreleaser-pro version: latest - args: release --clean + args: release --clean --split ${{ env.flags }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_REPOSITORY: ${{ github.repository }} GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }} + + release: + runs-on: ubuntu-latest + needs: prepare + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Apply tag + run: git tag ${{ github.event.inputs.tag }} + + - name: Setup Go environment explicitly + uses: actions/setup-go@v3 + with: + go-version: "1.20" + check-latest: true + cache: true + + - name: Log in to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + # copy the caches from prepare + - shell: bash + run: echo "sha_short=$(git rev-parse --short HEAD)" >> $GITHUB_ENV + - uses: actions/cache@v4 + with: + path: dist/linux_amd64 + key: linux-${{ env.sha_short }} + - uses: actions/cache@v4 + with: + path: dist/darwin_amd64 + key: darwin-${{ env.sha_short }} + - uses: actions/cache@v4 + with: + path: dist/windows_amd64 + key: windows-${{ env.sha_short }} + enableCrossOsArchive: true + + + # release + - uses: goreleaser/goreleaser-action@v5 + if: steps.cache.outputs.cache-hit != 'true' # do not run if cache hit + with: + distribution: goreleaser-pro + version: latest + args: continue --merge env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_REPOSITORY: ${{ github.repository }} - GORELEASER_KEY: ${{ secrets.GORELEASER_KEY
}} \ No newline at end of file + GORELEASER_KEY: ${{ secrets.GORELEASER_KEY }} + + pull-docker-image: + name: Pull docker image job + runs-on: ubuntu-latest + needs: prepare + + strategy: + fail-fast: false + matrix: + image_tag: + - sourcenetwork/defradb:latest + - ghcr.io/sourcenetwork/defradb:latest + + steps: + - name: Log in to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Pull Docker image + run: docker pull ${{ matrix.image_tag }} + + - name: Test Docker image + run: docker run --rm ${{ matrix.image_tag }} \ No newline at end of file diff --git a/.goreleaser.yaml b/.goreleaser.yaml index 87c1561a22..119447d180 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -1,17 +1,10 @@ version: 1 -dist: ./build - before: hooks: - go mod tidy - make deps:playground -after: - hooks: - - cmd: docker pull {{ .Env.GITHUB_REPOSITORY }}:latest - - cmd: docker run --rm {{ .Env.GITHUB_REPOSITORY }}:latest - builds: - id: "defradb" main: ./cmd/defradb @@ -34,6 +27,9 @@ builds: goarch: - amd64 - arm64 + +partial: + by: target archives: - id: defradb_playground From 3261c516d0b64c33e4dd973807fec392dc73aa5e Mon Sep 17 00:00:00 2001 From: Fred Carle Date: Tue, 16 Jan 2024 14:47:56 -0500 Subject: [PATCH 60/60] Release v0.9.0 --- CHANGELOG.md | 77 ++++++++++++++++++++++++++++++++++++++++++++++++ licenses/BSL.txt | 4 +-- 2 files changed, 79 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e5bcd6cfbe..6fa2ed67e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,80 @@ + +## [v0.9.0](https://github.com/sourcenetwork/defradb/compare/v0.8.0...v0.9.0) + +> 2024-01-18 + +DefraDB v0.9 is a major pre-production release. Until the stable version 1.0 is reached, the SemVer minor patch number will denote notable releases, which will give the project freedom to experiment and explore potentially breaking changes. + +To get a full outline of the changes, we invite you to review the official changelog below. This release does include a Breaking Change to existing v0.8.x databases. If you need help migrating an existing deployment, reach out at [hello@source.network](mailto:hello@source.network) or join our Discord at https://discord.gg/w7jYQVJ/. 
+ +### Features + +* Mutation typed input ([#2167](https://github.com/sourcenetwork/defradb/issues/2167)) +* Add PN Counter CRDT type ([#2119](https://github.com/sourcenetwork/defradb/issues/2119)) +* Allow users to add Views ([#2114](https://github.com/sourcenetwork/defradb/issues/2114)) +* Add unique secondary index ([#2131](https://github.com/sourcenetwork/defradb/issues/2131)) +* New cmd for docs auto generation ([#2096](https://github.com/sourcenetwork/defradb/issues/2096)) +* Add blob scalar type ([#2091](https://github.com/sourcenetwork/defradb/issues/2091)) + +### Fixes + +* Add entropy to counter CRDT type updates ([#2186](https://github.com/sourcenetwork/defradb/issues/2186)) +* Handle multiple nil values on unique indexed fields ([#2178](https://github.com/sourcenetwork/defradb/issues/2178)) +* Filtering on unique index if there is no match ([#2177](https://github.com/sourcenetwork/defradb/issues/2177)) + +### Performance + +* Switch LensVM to wasmtime runtime ([#2030](https://github.com/sourcenetwork/defradb/issues/2030)) + +### Refactoring + +* Add strong typing to document creation ([#2161](https://github.com/sourcenetwork/defradb/issues/2161)) +* Rename key,id,dockey to docID terminology ([#1749](https://github.com/sourcenetwork/defradb/issues/1749)) +* Simplify Merkle CRDT workflow ([#2111](https://github.com/sourcenetwork/defradb/issues/2111)) + +### Testing + +* Add auto-doc generation ([#2051](https://github.com/sourcenetwork/defradb/issues/2051)) + +### Continuous integration + +* Add windows test runner ([#2033](https://github.com/sourcenetwork/defradb/issues/2033)) + +### Chore + +* Update Lens to v0.5 ([#2083](https://github.com/sourcenetwork/defradb/issues/2083)) + +### Bot + +* Bump [@types](https://github.com/types)/react from 18.2.47 to 18.2.48 in /playground ([#2213](https://github.com/sourcenetwork/defradb/issues/2213)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 6.18.0 to 6.18.1 in /playground ([#2215](https://github.com/sourcenetwork/defradb/issues/2215)) +* Update dependencies (bulk dependabot PRs) 15-01-2024 ([#2217](https://github.com/sourcenetwork/defradb/issues/2217)) +* Bump follow-redirects from 1.15.3 to 1.15.4 in /playground ([#2181](https://github.com/sourcenetwork/defradb/issues/2181)) +* Bump github.com/getkin/kin-openapi from 0.120.0 to 0.122.0 ([#2097](https://github.com/sourcenetwork/defradb/issues/2097)) +* Update dependencies (bulk dependabot PRs) 08-01-2024 ([#2173](https://github.com/sourcenetwork/defradb/issues/2173)) +* Bump github.com/bits-and-blooms/bitset from 1.12.0 to 1.13.0 ([#2160](https://github.com/sourcenetwork/defradb/issues/2160)) +* Bump [@types](https://github.com/types)/react from 18.2.45 to 18.2.46 in /playground ([#2159](https://github.com/sourcenetwork/defradb/issues/2159)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 6.15.0 to 6.16.0 in /playground ([#2156](https://github.com/sourcenetwork/defradb/issues/2156)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 6.15.0 to 6.16.0 in /playground ([#2155](https://github.com/sourcenetwork/defradb/issues/2155)) +* Update dependencies (bulk dependabot PRs) 27-12-2023 ([#2154](https://github.com/sourcenetwork/defradb/issues/2154)) +* Bump github.com/spf13/viper from 1.17.0 to 1.18.2 ([#2145](https://github.com/sourcenetwork/defradb/issues/2145)) +* Bump golang.org/x/crypto from 0.16.0 to 0.17.0 ([#2144](https://github.com/sourcenetwork/defradb/issues/2144)) +* Update dependencies (bulk 
dependabot PRs) 18-12-2023 ([#2142](https://github.com/sourcenetwork/defradb/issues/2142)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 6.13.2 to 6.14.0 in /playground ([#2136](https://github.com/sourcenetwork/defradb/issues/2136)) +* Bump [@types](https://github.com/types)/react from 18.2.43 to 18.2.45 in /playground ([#2134](https://github.com/sourcenetwork/defradb/issues/2134)) +* Bump vite from 5.0.7 to 5.0.10 in /playground ([#2135](https://github.com/sourcenetwork/defradb/issues/2135)) +* Update dependencies (bulk dependabot PRs) 04-12-2023 ([#2133](https://github.com/sourcenetwork/defradb/issues/2133)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 6.13.1 to 6.13.2 in /playground ([#2109](https://github.com/sourcenetwork/defradb/issues/2109)) +* Bump vite from 5.0.2 to 5.0.5 in /playground ([#2112](https://github.com/sourcenetwork/defradb/issues/2112)) +* Bump [@types](https://github.com/types)/react from 18.2.41 to 18.2.42 in /playground ([#2108](https://github.com/sourcenetwork/defradb/issues/2108)) +* Update dependencies (bulk dependabot PRs) 04-12-2023 ([#2107](https://github.com/sourcenetwork/defradb/issues/2107)) +* Bump [@types](https://github.com/types)/react from 18.2.38 to 18.2.39 in /playground ([#2086](https://github.com/sourcenetwork/defradb/issues/2086)) +* Bump [@typescript](https://github.com/typescript)-eslint/parser from 6.12.0 to 6.13.0 in /playground ([#2085](https://github.com/sourcenetwork/defradb/issues/2085)) +* Update dependencies (bulk dependabot PRs) 27-11-2023 ([#2081](https://github.com/sourcenetwork/defradb/issues/2081)) +* Bump swagger-ui-react from 5.10.0 to 5.10.3 in /playground ([#2067](https://github.com/sourcenetwork/defradb/issues/2067)) +* Bump [@typescript](https://github.com/typescript)-eslint/eslint-plugin from 6.11.0 to 6.12.0 in /playground ([#2068](https://github.com/sourcenetwork/defradb/issues/2068)) +* Update dependencies (bulk dependabot PRs) 20-11-2023 ([#2066](https://github.com/sourcenetwork/defradb/issues/2066)) + ## [v0.8.0](https://github.com/sourcenetwork/defradb/compare/v0.7.0...v0.8.0) diff --git a/licenses/BSL.txt b/licenses/BSL.txt index bd545e07fc..9aef00b6d8 100644 --- a/licenses/BSL.txt +++ b/licenses/BSL.txt @@ -7,7 +7,7 @@ Parameters Licensor: Democratized Data (D2) Foundation -Licensed Work: DefraDB v0.8.0 +Licensed Work: DefraDB v0.9.0 The Licensed Work is (c) 2023 D2 Foundation. @@ -28,7 +28,7 @@ Additional Use Grant: You may only use the Licensed Work for the -Change Date: 2027-11-14 +Change Date: 2028-01-16 Change License: Apache License, Version 2.0