From 2b701544c4bd0ac15b8ef43e9f08c4ecd46995f8 Mon Sep 17 00:00:00 2001 From: AndrewSisley Date: Mon, 17 Jun 2024 09:15:37 -0400 Subject: [PATCH] refactor: Rework definition validation (#2720) ## Relevant issue(s) Resolves #2537 ## Description Reworks definition validation, standardizing the rule signatures, allowing rule reuse across different contexts, and hopefully improving their readability. Performance of the rules will have decreased slightly, but on col/schema update that is unimportant, performance of `createCollections` (called when creating via SDL docs) has probably improved slightly due to a reduction in datastore calls. --- internal/db/collection_define.go | 169 ++-- internal/db/definition_validation.go | 829 ++++++++++++------ internal/db/errors.go | 14 +- internal/db/schema.go | 59 +- internal/db/view.go | 36 +- .../updates/copy/name_test.go | 2 +- .../updates/replace/name_test.go | 2 +- tests/integration/schema/get_schema_test.go | 12 +- .../schema/migrations/query/simple_test.go | 42 +- .../migrations/query/with_doc_id_test.go | 4 +- .../migrations/query/with_inverse_test.go | 6 +- .../query/with_p2p_schema_branch_test.go | 2 +- .../schema/migrations/query/with_p2p_test.go | 14 +- .../migrations/query/with_restart_test.go | 4 +- .../migrations/query/with_set_default_test.go | 6 +- .../schema/migrations/query/with_txn_test.go | 4 +- .../migrations/query/with_update_test.go | 4 +- .../schema/migrations/simple_test.go | 4 +- tests/integration/schema/one_one_test.go | 4 +- .../updates/add/field/create_update_test.go | 4 +- .../schema/updates/add/field/simple_test.go | 10 +- .../schema/updates/add/simple_test.go | 2 +- .../schema/updates/copy/field/simple_test.go | 2 +- .../schema/updates/copy/simple_test.go | 2 +- .../schema/updates/remove/simple_test.go | 4 +- .../schema/updates/replace/simple_test.go | 2 +- .../schema/updates/with_schema_branch_test.go | 28 +- .../schema/with_update_set_default_test.go | 2 +- 28 files changed, 807 insertions(+), 466 
deletions(-) diff --git a/internal/db/collection_define.go b/internal/db/collection_define.go index 4712911399..a8b9fe9abd 100644 --- a/internal/db/collection_define.go +++ b/internal/db/collection_define.go @@ -25,113 +25,126 @@ import ( "github.com/sourcenetwork/defradb/internal/db/description" ) -func (db *db) createCollection( +func (db *db) createCollections( ctx context.Context, - def client.CollectionDefinition, newDefinitions []client.CollectionDefinition, -) (client.Collection, error) { - schema := def.Schema - desc := def.Description - txn := mustGetContextTxn(ctx) - - if desc.Name.HasValue() { - exists, err := description.HasCollectionByName(ctx, txn, desc.Name.Value()) - if err != nil { - return nil, err - } - if exists { - return nil, ErrCollectionAlreadyExists - } - } +) ([]client.CollectionDefinition, error) { + returnDescriptions := make([]client.CollectionDefinition, len(newDefinitions)) existingDefinitions, err := db.getAllActiveDefinitions(ctx) if err != nil { return nil, err } - schemaByName := map[string]client.SchemaDescription{} - for _, existingDefinition := range existingDefinitions { - schemaByName[existingDefinition.Schema.Name] = existingDefinition.Schema - } - for _, newDefinition := range newDefinitions { - schemaByName[newDefinition.Schema.Name] = newDefinition.Schema - } + txn := mustGetContextTxn(ctx) - _, err = validateUpdateSchemaFields(schemaByName, client.SchemaDescription{}, schema) - if err != nil { - return nil, err - } + for i, def := range newDefinitions { + schemaByName := map[string]client.SchemaDescription{} + for _, existingDefinition := range existingDefinitions { + schemaByName[existingDefinition.Schema.Name] = existingDefinition.Schema + } + for _, newDefinition := range newDefinitions { + schemaByName[newDefinition.Schema.Name] = newDefinition.Schema + } - definitionsByName := map[string]client.CollectionDefinition{} - for _, existingDefinition := range existingDefinitions { - 
definitionsByName[existingDefinition.GetName()] = existingDefinition - } - for _, newDefinition := range newDefinitions { - definitionsByName[newDefinition.GetName()] = newDefinition - } - err = db.validateNewCollection(def, definitionsByName) - if err != nil { - return nil, err + schema, err := description.CreateSchemaVersion(ctx, txn, def.Schema) + if err != nil { + return nil, err + } + newDefinitions[i].Description.SchemaVersionID = schema.VersionID + newDefinitions[i].Schema = schema } - colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) - if err != nil { - return nil, err - } - colID, err := colSeq.next(ctx) - if err != nil { - return nil, err - } + for i, def := range newDefinitions { + if len(def.Description.Fields) == 0 { + // This is a schema-only definition, we should not create a collection for it + continue + } - fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(uint32(colID))) - if err != nil { - return nil, err - } + colSeq, err := db.getSequence(ctx, core.CollectionIDSequenceKey{}) + if err != nil { + return nil, err + } + colID, err := colSeq.next(ctx) + if err != nil { + return nil, err + } - desc.ID = uint32(colID) - desc.RootID = desc.ID + fieldSeq, err := db.getSequence(ctx, core.NewFieldIDSequenceKey(uint32(colID))) + if err != nil { + return nil, err + } - schema, err = description.CreateSchemaVersion(ctx, txn, schema) - if err != nil { - return nil, err - } - desc.SchemaVersionID = schema.VersionID - for _, localField := range desc.Fields { - var fieldID uint64 - if localField.Name == request.DocIDFieldName { - // There is no hard technical requirement for this, we just think it looks nicer - // if the doc id is at the zero index. It makes it look a little nicer in commit - // queries too. 
- fieldID = 0 - } else { - fieldID, err = fieldSeq.next(ctx) - if err != nil { - return nil, err + newDefinitions[i].Description.ID = uint32(colID) + newDefinitions[i].Description.RootID = newDefinitions[i].Description.ID + + for _, localField := range def.Description.Fields { + var fieldID uint64 + if localField.Name == request.DocIDFieldName { + // There is no hard technical requirement for this, we just think it looks nicer + // if the doc id is at the zero index. It makes it look a little nicer in commit + // queries too. + fieldID = 0 + } else { + fieldID, err = fieldSeq.next(ctx) + if err != nil { + return nil, err + } } - } - for i := range desc.Fields { - if desc.Fields[i].Name == localField.Name { - desc.Fields[i].ID = client.FieldID(fieldID) - break + for j := range def.Description.Fields { + if def.Description.Fields[j].Name == localField.Name { + newDefinitions[i].Description.Fields[j].ID = client.FieldID(fieldID) + break + } } } } - desc, err = description.SaveCollection(ctx, txn, desc) + err = db.validateNewCollection( + ctx, + append( + append( + []client.CollectionDefinition{}, + newDefinitions..., + ), + existingDefinitions..., + ), + existingDefinitions, + ) if err != nil { return nil, err } - col := db.newCollection(desc, schema) + for _, def := range newDefinitions { + if len(def.Description.Fields) == 0 { + // This is a schema-only definition, we should not create a collection for it + returnDescriptions = append(returnDescriptions, def) + continue + } + + desc, err := description.SaveCollection(ctx, txn, def.Description) + if err != nil { + return nil, err + } + + col := db.newCollection(desc, def.Schema) + + for _, index := range desc.Indexes { + if _, err := col.createIndex(ctx, index); err != nil { + return nil, err + } + } - for _, index := range desc.Indexes { - if _, err := col.createIndex(ctx, index); err != nil { + result, err := db.getCollectionByID(ctx, desc.ID) + if err != nil { return nil, err } + + returnDescriptions = 
append(returnDescriptions, result.Definition()) } - return db.getCollectionByID(ctx, desc.ID) + return returnDescriptions, nil } func (db *db) patchCollection( @@ -171,7 +184,7 @@ func (db *db) patchCollection( return err } - err = db.validateCollectionChanges(existingColsByID, newColsByID) + err = db.validateCollectionChanges(ctx, cols, newColsByID) if err != nil { return err } diff --git a/internal/db/definition_validation.go b/internal/db/definition_validation.go index 988ebeb15c..08e6e603a7 100644 --- a/internal/db/definition_validation.go +++ b/internal/db/definition_validation.go @@ -14,18 +14,88 @@ import ( "context" "reflect" - "github.com/sourcenetwork/immutable" - "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" ) -var patchCollectionValidators = []func( - map[uint32]client.CollectionDescription, - map[uint32]client.CollectionDescription, -) error{ - validateCollectionNameUnique, - validateSingleVersionActive, +// definitionState holds collection and schema descriptions in easily accessible +// sets. +// +// It is read only and will not and should not be mutated. +type definitionState struct { + collections []client.CollectionDescription + collectionsByID map[uint32]client.CollectionDescription + + schemaByID map[string]client.SchemaDescription + schemaByName map[string]client.SchemaDescription + + definitionsByName map[string]client.CollectionDefinition +} + +// newDefinitionState creates a new definitionState object given the provided +// descriptions. 
+func newDefinitionState( + collections []client.CollectionDescription, + schemasByID map[string]client.SchemaDescription, +) *definitionState { + collectionsByID := map[uint32]client.CollectionDescription{} + definitionsByName := map[string]client.CollectionDefinition{} + schemaByName := map[string]client.SchemaDescription{} + schemaVersionsAdded := map[string]struct{}{} + + for _, col := range collections { + if len(col.Fields) == 0 { + continue + } + + schema := schemasByID[col.SchemaVersionID] + definition := client.CollectionDefinition{ + Description: col, + Schema: schema, + } + + definitionsByName[definition.GetName()] = definition + schemaVersionsAdded[schema.VersionID] = struct{}{} + collectionsByID[col.ID] = col + } + + for _, schema := range schemasByID { + schemaByName[schema.Name] = schema + + if _, ok := schemaVersionsAdded[schema.VersionID]; ok { + continue + } + + definitionsByName[schema.Name] = client.CollectionDefinition{ + Schema: schema, + } + } + + return &definitionState{ + collections: collections, + collectionsByID: collectionsByID, + schemaByID: schemasByID, + schemaByName: schemaByName, + definitionsByName: definitionsByName, + } +} + +// definitionValidator aliases the signature that all schema and collection +// validation functions should follow. +type definitionValidator = func( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error + +// createOnlyValidators are executed on the creation of new descriptions only +// they will not be executed for updates to existing records. +var createOnlyValidators = []definitionValidator{} + +// updateOnlyValidators are executed on the update of existing descriptions only +// they will not be executed for new records.
+var updateOnlyValidators = []definitionValidator{ validateSourcesNotRedefined, validateIndexesNotModified, validateFieldsNotModified, @@ -36,23 +106,53 @@ var patchCollectionValidators = []func( validateRootIDNotMutated, validateSchemaVersionIDNotMutated, validateCollectionNotRemoved, + validateSingleVersionActive, + validateSchemaNotAdded, + validateSchemaFieldNotDeleted, + validateFieldNotMutated, + validateFieldNotMoved, } -var newCollectionValidators = []func( - client.CollectionDefinition, - map[string]client.CollectionDefinition, -) error{ - validateSecondaryFieldsPairUp, +// globalValidators are run on create and update of records. +var globalValidators = []definitionValidator{ + validateCollectionNameUnique, validateRelationPointsToValidKind, + validateSecondaryFieldsPairUp, validateSingleSidePrimary, + validateCollectionDefinitionPolicyDesc, + validateSchemaNameNotEmpty, + validateRelationalFieldIDType, + validateSecondaryNotOnSchema, + validateTypeSupported, + validateTypeAndKindCompatible, + validateFieldNotDuplicated, } +var updateValidators = append( + append([]definitionValidator{}, updateOnlyValidators...), + globalValidators..., +) + +var createValidators = append( + append([]definitionValidator{}, createOnlyValidators...), + globalValidators..., +) + func (db *db) validateCollectionChanges( - oldColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + oldCols []client.CollectionDescription, newColsByID map[uint32]client.CollectionDescription, ) error { - for _, validators := range patchCollectionValidators { - err := validators(oldColsByID, newColsByID) + newCols := make([]client.CollectionDescription, 0, len(newColsByID)) + for _, col := range newColsByID { + newCols = append(newCols, col) + } + + newState := newDefinitionState(newCols, map[string]client.SchemaDescription{}) + oldState := newDefinitionState(oldCols, map[string]client.SchemaDescription{}) + + for _, validator := range updateValidators { + err := validator(ctx, 
db, newState, oldState) if err != nil { return err } @@ -62,11 +162,38 @@ func (db *db) validateCollectionChanges( } func (db *db) validateNewCollection( - def client.CollectionDefinition, - defsByName map[string]client.CollectionDefinition, + ctx context.Context, + newDefinitions []client.CollectionDefinition, + oldDefinitions []client.CollectionDefinition, ) error { - for _, validators := range newCollectionValidators { - err := validators(def, defsByName) + newCollections := []client.CollectionDescription{} + newSchemasByID := map[string]client.SchemaDescription{} + + for _, def := range newDefinitions { + if len(def.Description.Fields) != 0 { + newCollections = append(newCollections, def.Description) + } + + newSchemasByID[def.Schema.VersionID] = def.Schema + } + + newState := newDefinitionState(newCollections, newSchemasByID) + + oldCollections := []client.CollectionDescription{} + oldSchemasByID := map[string]client.SchemaDescription{} + + for _, def := range oldDefinitions { + if len(def.Description.Fields) != 0 { + oldCollections = append(oldCollections, def.Description) + } + + oldSchemasByID[def.Schema.VersionID] = def.Schema + } + + oldState := newDefinitionState(oldCollections, oldSchemasByID) + + for _, validator := range createValidators { + err := validator(ctx, db, newState, oldState) if err != nil { return err } @@ -75,75 +202,134 @@ func (db *db) validateNewCollection( return nil } -func validateRelationPointsToValidKind( - def client.CollectionDefinition, - defsByName map[string]client.CollectionDefinition, +func (db *db) validateSchemaUpdate( + ctx context.Context, + newSchemaByName map[string]client.SchemaDescription, + oldSchemaByName map[string]client.SchemaDescription, ) error { - for _, field := range def.Description.Fields { - if !field.Kind.HasValue() { - continue - } + newSchemaByID := make(map[string]client.SchemaDescription, len(newSchemaByName)) + oldSchemaByID := make(map[string]client.SchemaDescription, len(oldSchemaByName)) + for 
_, schema := range newSchemaByName { + newSchemaByID[schema.VersionID] = schema + } + for _, schema := range oldSchemaByName { + oldSchemaByID[schema.VersionID] = schema + } - if !field.Kind.Value().IsObject() { - continue - } + newState := newDefinitionState([]client.CollectionDescription{}, newSchemaByID) + oldState := newDefinitionState([]client.CollectionDescription{}, oldSchemaByID) - underlying := field.Kind.Value().Underlying() - _, ok := defsByName[underlying] - if !ok { - return NewErrFieldKindNotFound(field.Name, underlying) + for _, validator := range updateValidators { + err := validator(ctx, db, newState, oldState) + if err != nil { + return err } } return nil } -func validateSecondaryFieldsPairUp( - def client.CollectionDefinition, - defsByName map[string]client.CollectionDefinition, +func validateRelationPointsToValidKind( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, field := range def.Description.Fields { - if !field.Kind.HasValue() { - continue - } + for _, newCollection := range newState.collections { + for _, field := range newCollection.Fields { + if !field.Kind.HasValue() { + continue + } - if !field.Kind.Value().IsObject() { - continue - } + if !field.Kind.Value().IsObject() { + continue + } - if !field.RelationName.HasValue() { - continue + underlying := field.Kind.Value().Underlying() + _, ok := newState.definitionsByName[underlying] + if !ok { + return NewErrFieldKindNotFound(field.Name, underlying) + } } + } - _, hasSchemaField := def.Schema.GetFieldByName(field.Name) - if hasSchemaField { - continue + for _, schema := range newState.schemaByName { + for _, field := range schema.Fields { + if !field.Kind.IsObject() { + continue + } + + underlying := field.Kind.Underlying() + _, ok := newState.definitionsByName[underlying] + if !ok { + return NewErrFieldKindNotFound(field.Name, underlying) + } } + } + + return nil +} - underlying := field.Kind.Value().Underlying() - otherDef, 
ok := defsByName[underlying] +func validateSecondaryFieldsPairUp( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, newCollection := range newState.collections { + schema, ok := newState.schemaByID[newCollection.SchemaVersionID] if !ok { continue } - if len(otherDef.Description.Fields) == 0 { - // Views/embedded objects do not require both sides of the relation to be defined. - continue + definition := client.CollectionDefinition{ + Description: newCollection, + Schema: schema, } - otherField, ok := otherDef.Description.GetFieldByRelation( - field.RelationName.Value(), - def.GetName(), - field.Name, - ) - if !ok { - return NewErrRelationMissingField(underlying, field.RelationName.Value()) - } + for _, field := range newCollection.Fields { + if !field.Kind.HasValue() { + continue + } - _, ok = otherDef.Schema.GetFieldByName(otherField.Name) - if !ok { - // This secondary is paired with another secondary, which is invalid - return NewErrRelationMissingField(underlying, field.RelationName.Value()) + if !field.Kind.Value().IsObject() { + continue + } + + if !field.RelationName.HasValue() { + continue + } + + _, hasSchemaField := schema.GetFieldByName(field.Name) + if hasSchemaField { + continue + } + + underlying := field.Kind.Value().Underlying() + otherDef, ok := newState.definitionsByName[underlying] + if !ok { + continue + } + + if len(otherDef.Description.Fields) == 0 { + // Views/embedded objects do not require both sides of the relation to be defined. 
+ continue + } + + otherField, ok := otherDef.Description.GetFieldByRelation( + field.RelationName.Value(), + definition.GetName(), + field.Name, + ) + if !ok { + return NewErrRelationMissingField(underlying, field.RelationName.Value()) + } + + _, ok = otherDef.Schema.GetFieldByName(otherField.Name) + if !ok { + // This secondary is paired with another secondary, which is invalid + return NewErrRelationMissingField(underlying, field.RelationName.Value()) + } } } @@ -151,48 +337,57 @@ func validateSecondaryFieldsPairUp( } func validateSingleSidePrimary( - def client.CollectionDefinition, - defsByName map[string]client.CollectionDefinition, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, field := range def.Description.Fields { - if !field.Kind.HasValue() { + for _, newCollection := range newState.collections { + schema, ok := newState.schemaByID[newCollection.SchemaVersionID] + if !ok { continue } - if !field.Kind.Value().IsObject() { - continue + definition := client.CollectionDefinition{ + Description: newCollection, + Schema: schema, } - if !field.RelationName.HasValue() { - continue - } + for _, field := range definition.GetFields() { + if !field.Kind.IsObject() { + continue + } - _, hasSchemaField := def.Schema.GetFieldByName(field.Name) - if !hasSchemaField { - // This is a secondary field and thus passes this rule - continue - } + if field.RelationName == "" { + continue + } - underlying := field.Kind.Value().Underlying() - otherDef, ok := defsByName[underlying] - if !ok { - continue - } + if !field.IsPrimaryRelation { + // This is a secondary field and thus passes this rule + continue + } - otherField, ok := otherDef.Description.GetFieldByRelation( - field.RelationName.Value(), - def.GetName(), - field.Name, - ) - if !ok { - // This must be a one-sided relation, in which case it passes this rule - continue - } + underlying := field.Kind.Underlying() + otherDef, ok := 
newState.definitionsByName[underlying] + if !ok { + continue + } + + otherField, ok := otherDef.Description.GetFieldByRelation( + field.RelationName, + definition.GetName(), + field.Name, + ) + if !ok { + // This must be a one-sided relation, in which case it passes this rule + continue + } - _, ok = otherDef.Schema.GetFieldByName(otherField.Name) - if ok { - // This primary is paired with another primary, which is invalid - return ErrMultipleRelationPrimaries + _, ok = otherDef.Schema.GetFieldByName(otherField.Name) + if ok { + // This primary is paired with another primary, which is invalid + return ErrMultipleRelationPrimaries + } } } @@ -200,11 +395,13 @@ func validateSingleSidePrimary( } func validateCollectionNameUnique( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { names := map[string]struct{}{} - for _, col := range newColsByID { + for _, col := range newState.collections { if !col.Name.HasValue() { continue } @@ -219,11 +416,13 @@ func validateCollectionNameUnique( } func validateSingleVersionActive( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { rootsWithActiveCol := map[uint32]struct{}{} - for _, col := range newColsByID { + for _, col := range newState.collections { if !col.Name.HasValue() { continue } @@ -243,11 +442,13 @@ func validateSingleVersionActive( // Currently new sources cannot be added, existing cannot be removed, and CollectionSources // cannot be redirected to other collections. 
func validateSourcesNotRedefined( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - oldCol, ok := oldColsByID[newCol.ID] + for _, newCol := range newState.collections { + oldCol, ok := oldState.collectionsByID[newCol.ID] if !ok { continue } @@ -281,11 +482,13 @@ func validateSourcesNotRedefined( } func validateIndexesNotModified( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - oldCol, ok := oldColsByID[newCol.ID] + for _, newCol := range newState.collections { + oldCol, ok := oldState.collectionsByID[newCol.ID] if !ok { continue } @@ -300,11 +503,13 @@ func validateIndexesNotModified( } func validateFieldsNotModified( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - oldCol, ok := oldColsByID[newCol.ID] + for _, newCol := range newState.collections { + oldCol, ok := oldState.collectionsByID[newCol.ID] if !ok { continue } @@ -319,11 +524,13 @@ func validateFieldsNotModified( } func validatePolicyNotModified( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - oldCol, ok := oldColsByID[newCol.ID] + for _, newCol := range newState.collections { + oldCol, ok := oldState.collectionsByID[newCol.ID] if !ok { continue } @@ -338,10 +545,12 @@ func validatePolicyNotModified( } func 
validateIDNotZero( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { + for _, newCol := range newState.collections { if newCol.ID == 0 { return ErrCollectionIDCannotBeZero } @@ -351,11 +560,13 @@ func validateIDNotZero( } func validateIDUnique( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { colIds := map[uint32]struct{}{} - for _, newCol := range newColsByID { + for _, newCol := range newState.collections { if _, ok := colIds[newCol.ID]; ok { return NewErrCollectionIDAlreadyExists(newCol.ID) } @@ -366,11 +577,13 @@ func validateIDUnique( } func validateIDExists( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - if _, ok := oldColsByID[newCol.ID]; !ok { + for _, newCol := range newState.collections { + if _, ok := oldState.collectionsByID[newCol.ID]; !ok { return NewErrAddCollectionIDWithPatch(newCol.ID) } } @@ -379,11 +592,13 @@ func validateIDExists( } func validateRootIDNotMutated( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - oldCol, ok := oldColsByID[newCol.ID] + for _, newCol := range newState.collections { + oldCol, ok := oldState.collectionsByID[newCol.ID] if !ok { continue } @@ -393,15 +608,28 @@ func validateRootIDNotMutated( } } + for _, newSchema := range newState.schemaByName { + oldSchema := 
oldState.schemaByName[newSchema.Name] + if newSchema.Root != oldSchema.Root { + return NewErrSchemaRootDoesntMatch( + newSchema.Name, + oldSchema.Root, + newSchema.Root, + ) + } + } + return nil } func validateSchemaVersionIDNotMutated( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { - for _, newCol := range newColsByID { - oldCol, ok := oldColsByID[newCol.ID] + for _, newCol := range newState.collections { + oldCol, ok := oldState.collectionsByID[newCol.ID] if !ok { continue } @@ -411,16 +639,26 @@ func validateSchemaVersionIDNotMutated( } } + for _, newSchema := range newState.schemaByName { + oldSchema := oldState.schemaByName[newSchema.Name] + if newSchema.VersionID != "" && newSchema.VersionID != oldSchema.VersionID { + // If users specify this it will be overwritten, an error is preferred to quietly ignoring it. + return ErrCannotSetVersionID + } + } + return nil } func validateCollectionNotRemoved( - oldColsByID map[uint32]client.CollectionDescription, - newColsByID map[uint32]client.CollectionDescription, + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, ) error { oldLoop: - for _, oldCol := range oldColsByID { - for _, newCol := range newColsByID { + for _, oldCol := range oldState.collections { + for _, newCol := range newState.collectionsByID { // It is not enough to just match by the map index, in case the index does not pair // up with the ID (this can happen if a user moves the collection within the map) if newCol.ID == oldCol.ID { @@ -438,148 +676,245 @@ oldLoop: // // Ensures that the information within the policy definition makes sense, // this function might also make relevant remote calls using the acp system. 
-func (db *db) validateCollectionDefinitionPolicyDesc( +func validateCollectionDefinitionPolicyDesc( ctx context.Context, - policyDesc immutable.Option[client.PolicyDescription], + db *db, + newState *definitionState, + oldState *definitionState, ) error { - if !policyDesc.HasValue() { - // No policy validation needed, whether acp exists or not doesn't matter. - return nil - } + for _, newCol := range newState.collections { + if !newCol.Policy.HasValue() { + // No policy validation needed, whether acp exists or not doesn't matter. + continue + } + + // If there is a policy specified, but the database does not have + // acp enabled/available return an error, database must have an acp available + // to enable access control (inorder to adhere to the policy specified). + if !db.acp.HasValue() { + return ErrCanNotHavePolicyWithoutACP + } + + // If we have the policy specified on the collection, and acp is available/enabled, + // then using the acp system we need to ensure the policy id specified + // actually exists as a policy, and the resource name exists on that policy + // and that the resource is a valid DPI. + err := db.acp.Value().ValidateResourceExistsOnValidDPI( + ctx, + newCol.Policy.Value().ID, + newCol.Policy.Value().ResourceName, + ) - // If there is a policy specified, but the database does not have - // acp enabled/available return an error, database must have an acp available - // to enable access control (inorder to adhere to the policy specified). - if !db.acp.HasValue() { - return ErrCanNotHavePolicyWithoutACP + if err != nil { + return err + } } - // If we have the policy specified on the collection, and acp is available/enabled, - // then using the acp system we need to ensure the policy id specified - // actually exists as a policy, and the resource name exists on that policy - // and that the resource is a valid DPI. 
- return db.acp.Value().ValidateResourceExistsOnValidDPI( - ctx, - policyDesc.Value().ID, - policyDesc.Value().ResourceName, - ) + return nil } -// validateUpdateSchema validates that the given schema description is a valid update. -// -// Will return true if the given description differs from the current persisted state of the -// schema. Will return an error if it fails validation. -func (db *db) validateUpdateSchema( - existingDescriptionsByName map[string]client.SchemaDescription, - proposedDescriptionsByName map[string]client.SchemaDescription, - proposedDesc client.SchemaDescription, -) (bool, error) { - if proposedDesc.Name == "" { - return false, ErrSchemaNameEmpty - } - - existingDesc, collectionExists := existingDescriptionsByName[proposedDesc.Name] - if !collectionExists { - return false, NewErrAddCollectionWithPatch(proposedDesc.Name) - } - - if proposedDesc.Root != existingDesc.Root { - return false, NewErrSchemaRootDoesntMatch( - proposedDesc.Name, - existingDesc.Root, - proposedDesc.Root, - ) - } +func validateSchemaFieldNotDeleted( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, newSchema := range newState.schemaByName { + oldSchema := oldState.schemaByName[newSchema.Name] + + for _, oldField := range oldSchema.Fields { + stillExists := false + for _, newField := range newSchema.Fields { + if newField.Name == oldField.Name { + stillExists = true + break + } + } - if proposedDesc.Name != existingDesc.Name { - // There is actually little reason to not support this atm besides controlling the surface area - // of the new feature. Changing this should not break anything, but it should be tested first. 
- return false, NewErrCannotModifySchemaName(existingDesc.Name, proposedDesc.Name) + if !stillExists { + return NewErrCannotDeleteField(oldField.Name) + } + } } - if proposedDesc.VersionID != "" && proposedDesc.VersionID != existingDesc.VersionID { - // If users specify this it will be overwritten, an error is preferred to quietly ignoring it. - return false, ErrCannotSetVersionID - } + return nil +} - hasChangedFields, err := validateUpdateSchemaFields(proposedDescriptionsByName, existingDesc, proposedDesc) - if err != nil { - return hasChangedFields, err +func validateTypeAndKindCompatible( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, newSchema := range newState.schemaByName { + for _, newField := range newSchema.Fields { + if !newField.Typ.IsCompatibleWith(newField.Kind) { + return client.NewErrCRDTKindMismatch(newField.Typ.String(), newField.Kind.String()) + } + } } - return hasChangedFields, err + return nil } -func validateUpdateSchemaFields( - descriptionsByName map[string]client.SchemaDescription, - existingDesc client.SchemaDescription, - proposedDesc client.SchemaDescription, -) (bool, error) { - hasChanged := false - existingFieldsByName := map[string]client.SchemaFieldDescription{} - existingFieldIndexesByName := map[string]int{} - for i, field := range existingDesc.Fields { - existingFieldIndexesByName[field.Name] = i - existingFieldsByName[field.Name] = field +func validateTypeSupported( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, newSchema := range newState.schemaByName { + for _, newField := range newSchema.Fields { + if !newField.Typ.IsSupportedFieldCType() { + return client.NewErrInvalidCRDTType(newField.Name, newField.Typ.String()) + } + } } - newFieldNames := map[string]struct{}{} - for proposedIndex, proposedField := range proposedDesc.Fields { - existingField, fieldAlreadyExists := 
existingFieldsByName[proposedField.Name] - - // If the field is new, then the collection has changed - hasChanged = hasChanged || !fieldAlreadyExists + return nil +} - if !fieldAlreadyExists && proposedField.Kind.IsObject() { - _, relatedDescFound := descriptionsByName[proposedField.Kind.Underlying()] +func validateFieldNotMoved( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, oldSchema := range oldState.schemaByName { + oldFieldIndexesByName := map[string]int{} + for i, field := range oldSchema.Fields { + oldFieldIndexesByName[field.Name] = i + } - if !relatedDescFound { - return false, NewErrFieldKindNotFound(proposedField.Name, proposedField.Kind.Underlying()) - } + newSchema := newState.schemaByName[oldSchema.Name] - if proposedField.Kind.IsObject() && !proposedField.Kind.IsArray() { - idFieldName := proposedField.Name + request.RelatedObjectID - idField, idFieldFound := proposedDesc.GetFieldByName(idFieldName) - if idFieldFound { - if idField.Kind != client.FieldKind_DocID { - return false, NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocID, idField.Kind) - } - } + for newIndex, newField := range newSchema.Fields { + if existingIndex, exists := oldFieldIndexesByName[newField.Name]; exists && newIndex != existingIndex { + return NewErrCannotMoveField(newField.Name, newIndex, existingIndex) } } + } - if proposedField.Kind.IsObjectArray() { - return false, NewErrSecondaryFieldOnSchema(proposedField.Name) + return nil +} + +func validateFieldNotMutated( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, oldSchema := range oldState.schemaByName { + oldFieldsByName := map[string]client.SchemaFieldDescription{} + for _, field := range oldSchema.Fields { + oldFieldsByName[field.Name] = field } - if _, isDuplicate := newFieldNames[proposedField.Name]; isDuplicate { - return false, NewErrDuplicateField(proposedField.Name) + 
newSchema := newState.schemaByName[oldSchema.Name] + + for _, newField := range newSchema.Fields { + oldField, exists := oldFieldsByName[newField.Name] + if exists && oldField != newField { + return NewErrCannotMutateField(newField.Name) + } } + } - if fieldAlreadyExists && proposedField != existingField { - return false, NewErrCannotMutateField(proposedField.Name) + return nil +} + +func validateFieldNotDuplicated( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, schema := range newState.schemaByName { + fieldNames := map[string]struct{}{} + + for _, field := range schema.Fields { + if _, isDuplicate := fieldNames[field.Name]; isDuplicate { + return NewErrDuplicateField(field.Name) + } + fieldNames[field.Name] = struct{}{} } + } + + return nil +} - if existingIndex := existingFieldIndexesByName[proposedField.Name]; fieldAlreadyExists && - proposedIndex != existingIndex { - return false, NewErrCannotMoveField(proposedField.Name, proposedIndex, existingIndex) +func validateSecondaryNotOnSchema( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, newSchema := range newState.schemaByName { + for _, newField := range newSchema.Fields { + if newField.Kind.IsObjectArray() { + return NewErrSecondaryFieldOnSchema(newField.Name) + } } + } + + return nil +} + +func validateRelationalFieldIDType( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, schema := range newState.schemaByName { + fieldsByName := map[string]client.SchemaFieldDescription{} - if !proposedField.Typ.IsSupportedFieldCType() { - return false, client.NewErrInvalidCRDTType(proposedField.Name, proposedField.Typ.String()) + for _, field := range schema.Fields { + fieldsByName[field.Name] = field } - if !proposedField.Typ.IsCompatibleWith(proposedField.Kind) { - return false, client.NewErrCRDTKindMismatch(proposedField.Typ.String(), 
proposedField.Kind.String()) + for _, field := range schema.Fields { + if field.Kind.IsObject() && !field.Kind.IsArray() { + idFieldName := field.Name + request.RelatedObjectID + idField, idFieldFound := fieldsByName[idFieldName] + if idFieldFound { + if idField.Kind != client.FieldKind_DocID { + return NewErrRelationalFieldIDInvalidType(idField.Name, client.FieldKind_DocID, idField.Kind) + } + } + } } + } + + return nil +} - newFieldNames[proposedField.Name] = struct{}{} +func validateSchemaNotAdded( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, newSchema := range newState.schemaByName { + if _, exists := oldState.schemaByName[newSchema.Name]; !exists { + return NewErrAddSchemaWithPatch(newSchema.Name) + } } - for _, field := range existingDesc.Fields { - if _, stillExists := newFieldNames[field.Name]; !stillExists { - return false, NewErrCannotDeleteField(field.Name) + return nil +} + +func validateSchemaNameNotEmpty( + ctx context.Context, + db *db, + newState *definitionState, + oldState *definitionState, +) error { + for _, schema := range newState.schemaByName { + if schema.Name == "" { + return ErrSchemaNameEmpty } } - return hasChanged, nil + + return nil } diff --git a/internal/db/errors.go b/internal/db/errors.go index 8d3c770bd8..7a81824efe 100644 --- a/internal/db/errors.go +++ b/internal/db/errors.go @@ -24,9 +24,9 @@ const ( errAddingP2PCollection string = "cannot add collection ID" errRemovingP2PCollection string = "cannot remove collection ID" errAddCollectionWithPatch string = "adding collections via patch is not supported" + errAddSchemaWithPatch string = "adding schema via patch is not supported" errCollectionIDDoesntMatch string = "CollectionID does not match existing" errSchemaRootDoesntMatch string = "SchemaRoot does not match existing" - errCannotModifySchemaName string = "modifying the schema name is not supported" errCannotSetVersionID string = "setting the VersionID is not 
supported" errRelationalFieldInvalidRelationType string = "invalid RelationType" errRelationalFieldMissingIDField string = "missing id field for relation object field" @@ -224,9 +224,9 @@ func NewErrRemovingP2PCollection(inner error) error { return errors.Wrap(errRemovingP2PCollection, inner) } -func NewErrAddCollectionWithPatch(name string) error { +func NewErrAddSchemaWithPatch(name string) error { return errors.New( - errAddCollectionWithPatch, + errAddSchemaWithPatch, errors.NewKV("Name", name), ) } @@ -256,14 +256,6 @@ func NewErrSchemaRootDoesntMatch(name, existingRoot, proposedRoot string) error ) } -func NewErrCannotModifySchemaName(existingName, proposedName string) error { - return errors.New( - errCannotModifySchemaName, - errors.NewKV("ExistingName", existingName), - errors.NewKV("ProposedName", proposedName), - ) -} - func NewErrRelationalFieldMissingIDField(name string, expectedName string) error { return errors.New( errRelationalFieldMissingIDField, diff --git a/internal/db/schema.go b/internal/db/schema.go index 8c0ba074dc..d2aeb8bcb9 100644 --- a/internal/db/schema.go +++ b/internal/db/schema.go @@ -44,19 +44,14 @@ func (db *db) addSchema( return nil, err } - returnDescriptions := make([]client.CollectionDescription, len(newDefinitions)) - for i, definition := range newDefinitions { - // Only accept the schema if policy description is valid, otherwise reject the schema. 
- err := db.validateCollectionDefinitionPolicyDesc(ctx, definition.Description.Policy) - if err != nil { - return nil, err - } + returnDefinitions, err := db.createCollections(ctx, newDefinitions) + if err != nil { + return nil, err + } - col, err := db.createCollection(ctx, definition, newDefinitions) - if err != nil { - return nil, err - } - returnDescriptions[i] = col.Description() + returnDescriptions := make([]client.CollectionDescription, len(returnDefinitions)) + for i, def := range returnDefinitions { + returnDescriptions[i] = def.Description } err = db.loadSchema(ctx) @@ -341,19 +336,18 @@ func (db *db) updateSchema( migration immutable.Option[model.Lens], setAsActiveVersion bool, ) error { - hasChanged, err := db.validateUpdateSchema( - existingSchemaByName, - proposedDescriptionsByName, - schema, - ) - if err != nil { - return err - } + previousSchema := existingSchemaByName[schema.Name] - if !hasChanged { + areEqual := areSchemasEqual(schema, previousSchema) + if areEqual { return nil } + err := db.validateSchemaUpdate(ctx, proposedDescriptionsByName, existingSchemaByName) + if err != nil { + return err + } + for _, field := range schema.Fields { if field.Kind.IsObject() && !field.Kind.IsArray() { idFieldName := field.Name + "_id" @@ -366,8 +360,13 @@ func (db *db) updateSchema( } } + previousFieldNames := make(map[string]struct{}, len(previousSchema.Fields)) + for _, field := range previousSchema.Fields { + previousFieldNames[field.Name] = struct{}{} + } + for i, field := range schema.Fields { - if field.Typ == client.NONE_CRDT { + if _, existed := previousFieldNames[field.Name]; !existed && field.Typ == client.NONE_CRDT { // If no CRDT Type has been provided, default to LWW_REGISTER. 
field.Typ = client.LWW_REGISTER schema.Fields[i] = field @@ -524,3 +523,19 @@ func (db *db) updateSchema( return nil } + +func areSchemasEqual(this client.SchemaDescription, that client.SchemaDescription) bool { + if len(this.Fields) != len(that.Fields) { + return false + } + + for i, thisField := range this.Fields { + if thisField != that.Fields[i] { + return false + } + } + + return this.Name == that.Name && + this.Root == that.Root && + this.VersionID == that.VersionID +} diff --git a/internal/db/view.go b/internal/db/view.go index a663da7add..2664dd4a57 100644 --- a/internal/db/view.go +++ b/internal/db/view.go @@ -20,7 +20,6 @@ import ( "github.com/sourcenetwork/defradb/client" "github.com/sourcenetwork/defradb/client/request" - "github.com/sourcenetwork/defradb/internal/db/description" ) func (db *db) addView( @@ -29,8 +28,6 @@ func (db *db) addView( sdl string, transform immutable.Option[model.Lens], ) ([]client.CollectionDefinition, error) { - txn := mustGetContextTxn(ctx) - // Wrap the given query as part of the GQL query object - this simplifies the syntax for users // and ensures that we can't be given mutations. In the future this line should disappear along // with the all calls to the parser appart from `ParseSDL` when we implement the DQL stuff. 
@@ -68,30 +65,17 @@ func (db *db) addView( newDefinitions[i].Description.Sources = append(newDefinitions[i].Description.Sources, &source) } - returnDescriptions := make([]client.CollectionDefinition, len(newDefinitions)) - for i, definition := range newDefinitions { - if !definition.Description.Name.HasValue() { - schema, err := description.CreateSchemaVersion(ctx, txn, definition.Schema) - if err != nil { - return nil, err - } - returnDescriptions[i] = client.CollectionDefinition{ - // `Collection` is left as default for embedded types - Schema: schema, - } - } else { - col, err := db.createCollection(ctx, definition, newDefinitions) - if err != nil { - return nil, err - } - returnDescriptions[i] = col.Definition() + returnDescriptions, err := db.createCollections(ctx, newDefinitions) + if err != nil { + return nil, err + } - for _, source := range col.Description().QuerySources() { - if source.Transform.HasValue() { - err = db.LensRegistry().SetMigration(ctx, col.ID(), source.Transform.Value()) - if err != nil { - return nil, err - } + for _, definition := range returnDescriptions { + for _, source := range definition.Description.QuerySources() { + if source.Transform.HasValue() { + err = db.LensRegistry().SetMigration(ctx, definition.Description.ID, source.Transform.Value()) + if err != nil { + return nil, err } } } diff --git a/tests/integration/collection_description/updates/copy/name_test.go b/tests/integration/collection_description/updates/copy/name_test.go index b915d111ac..f5cbd3a83b 100644 --- a/tests/integration/collection_description/updates/copy/name_test.go +++ b/tests/integration/collection_description/updates/copy/name_test.go @@ -40,7 +40,7 @@ func TestColDescrUpdateCopyName_Errors(t *testing.T) { { "op": "copy", "from": "/1/Name", "path": "/2/Name" } ] `, - ExpectedError: "collection already exists. Name: Users", + ExpectedError: "multiple versions of same collection cannot be active. 
Name: Users, Root: 1", }, }, } diff --git a/tests/integration/collection_description/updates/replace/name_test.go b/tests/integration/collection_description/updates/replace/name_test.go index 98f1ba8c98..55e8160969 100644 --- a/tests/integration/collection_description/updates/replace/name_test.go +++ b/tests/integration/collection_description/updates/replace/name_test.go @@ -99,7 +99,7 @@ func TestColDescrUpdateReplaceName_GivenInactiveCollectionWithSameName_Errors(t { "op": "replace", "path": "/2/Name", "value": "Users" } ] `, - ExpectedError: "collection already exists. Name: Users", + ExpectedError: "multiple versions of same collection cannot be active. Name: Users, Root: 1", }, }, } diff --git a/tests/integration/schema/get_schema_test.go b/tests/integration/schema/get_schema_test.go index a89f4a2eb9..7f04c99c9e 100644 --- a/tests/integration/schema/get_schema_test.go +++ b/tests/integration/schema/get_schema_test.go @@ -72,7 +72,7 @@ func TestGetSchema_GivenNoSchemaGivenUnknownName(t *testing.T) { func TestGetSchema_ReturnsAllSchema(t *testing.T) { usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" - usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" + usersSchemaVersion2ID := "bafkreialnju2rez4t3quvpobf3463eai3lo64vdrdhdmunz7yy7sv3f5ce" booksSchemaVersion1ID := "bafkreibiu34zrehpq346pwp5z24qkderm7ibhnpcqalhkivhnf5e2afqoy" test := testUtils.TestCase{ @@ -116,7 +116,7 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -146,7 +146,7 @@ func TestGetSchema_ReturnsAllSchema(t *testing.T) { func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" - usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" + usersSchemaVersion2ID := 
"bafkreialnju2rez4t3quvpobf3463eai3lo64vdrdhdmunz7yy7sv3f5ce" test := testUtils.TestCase{ Actions: []any{ @@ -190,7 +190,7 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -209,7 +209,7 @@ func TestGetSchema_ReturnsSchemaForGivenRoot(t *testing.T) { func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { usersSchemaVersion1ID := "bafkreia2jn5ecrhtvy4fravk6pm3wqiny46m7mqymvjkgat7xiqupgqoai" - usersSchemaVersion2ID := "bafkreibbsqjeladin2keszmja5kektzgi4eowb6m3oimxssiqge7mmvhva" + usersSchemaVersion2ID := "bafkreialnju2rez4t3quvpobf3463eai3lo64vdrdhdmunz7yy7sv3f5ce" test := testUtils.TestCase{ Actions: []any{ @@ -253,7 +253,7 @@ func TestGetSchema_ReturnsSchemaForGivenName(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", diff --git a/tests/integration/schema/migrations/query/simple_test.go b/tests/integration/schema/migrations/query/simple_test.go index a588e70e87..4e0ca20f2b 100644 --- a/tests/integration/schema/migrations/query/simple_test.go +++ b/tests/integration/schema/migrations/query/simple_test.go @@ -46,7 +46,7 @@ func TestSchemaMigrationQuery(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -116,7 +116,7 @@ func TestSchemaMigrationQueryMultipleDocs(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: 
"bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -179,7 +179,7 @@ func TestSchemaMigrationQueryWithMigrationRegisteredBeforeSchemaPatch(t *testing testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -255,7 +255,7 @@ func TestSchemaMigrationQueryMigratesToIntermediaryVersion(t *testing.T) { // there should be no migration from version 2 to version 3. LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -325,8 +325,8 @@ func TestSchemaMigrationQueryMigratesFromIntermediaryVersion(t *testing.T) { // Register a migration from schema version 2 to schema version 3 **only** - // there should be no migration from version 1 to version 2. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", - DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", + SourceSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", + DestinationSchemaVersionID: "bafkreicpdtq27uclgcyeqivvyjvojtk57a573y3upfhi3lvteytktyhlva", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -395,7 +395,7 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -411,8 +411,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", - DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", + SourceSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", + DestinationSchemaVersionID: "bafkreicpdtq27uclgcyeqivvyjvojtk57a573y3upfhi3lvteytktyhlva", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -467,7 +467,7 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -483,8 +483,8 @@ func 
TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatches(t *test }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", - DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", + SourceSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", + DestinationSchemaVersionID: "bafkreicpdtq27uclgcyeqivvyjvojtk57a573y3upfhi3lvteytktyhlva", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -553,8 +553,8 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd testUtils.ConfigureMigration{ // Declare the migration from v2=>v3 before declaring the migration from v1=>v2 LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", - DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", + SourceSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", + DestinationSchemaVersionID: "bafkreicpdtq27uclgcyeqivvyjvojtk57a573y3upfhi3lvteytktyhlva", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -571,7 +571,7 @@ func TestSchemaMigrationQueryMigratesAcrossMultipleVersionsBeforePatchesWrongOrd testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -713,7 +713,7 @@ func TestSchemaMigrationQueryMigrationMutatesExistingScalarField(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: 
"bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -774,7 +774,7 @@ func TestSchemaMigrationQueryMigrationMutatesExistingInlineArrayField(t *testing testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreicn6ltdovb6y7g3ecoptqkvx2y5y5yntrb5uydmg3jiakskqva2ta", - DestinationSchemaVersionID: "bafkreifv4vhz3dw7upc5u3omsqi6klz3h3e54ogfskp72gtut62fuxqrcu", + DestinationSchemaVersionID: "bafkreigb473jarbms7de62ykdu5necvxukmb6zbzolp4szdjcwzjvomuiq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -837,7 +837,7 @@ func TestSchemaMigrationQueryMigrationRemovesExistingField(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", - DestinationSchemaVersionID: "bafkreiegvk3fkcjxoqqpp7npxqjdjwijiwthvynzmsvtzajpjevgu2krku", + DestinationSchemaVersionID: "bafkreibbnm7nrtnvwo7hmjjxacx7nxlqkp6bfr24vtlbv5vhwttlhrbr4q", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -898,7 +898,7 @@ func TestSchemaMigrationQueryMigrationPreservesExistingFieldWhenFieldNotRequeste testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", - DestinationSchemaVersionID: "bafkreiegvk3fkcjxoqqpp7npxqjdjwijiwthvynzmsvtzajpjevgu2krku", + DestinationSchemaVersionID: "bafkreibbnm7nrtnvwo7hmjjxacx7nxlqkp6bfr24vtlbv5vhwttlhrbr4q", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -972,7 +972,7 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcFieldNotRequeste testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", - DestinationSchemaVersionID: "bafkreidgnuvanzqur3pkp4mmrd77ojwvov2rlczraaks4435e6wsgxpwoq", + 
DestinationSchemaVersionID: "bafkreifhm3admsxmv3xsbxehfkmtfnxqaq5wchrx47e7zc6vaxr352b3om", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -1034,7 +1034,7 @@ func TestSchemaMigrationQueryMigrationCopiesExistingFieldWhenSrcAndDstFieldNotRe testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreihhd6bqrjhl5zidwztgxzeseveplv3cj3fwtn3unjkdx7j2vr2vrq", - DestinationSchemaVersionID: "bafkreidgnuvanzqur3pkp4mmrd77ojwvov2rlczraaks4435e6wsgxpwoq", + DestinationSchemaVersionID: "bafkreifhm3admsxmv3xsbxehfkmtfnxqaq5wchrx47e7zc6vaxr352b3om", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_doc_id_test.go b/tests/integration/schema/migrations/query/with_doc_id_test.go index a006441c4f..2bd34a6fd4 100644 --- a/tests/integration/schema/migrations/query/with_doc_id_test.go +++ b/tests/integration/schema/migrations/query/with_doc_id_test.go @@ -53,7 +53,7 @@ func TestSchemaMigrationQueryByDocID(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -159,7 +159,7 @@ func TestSchemaMigrationQueryMultipleQueriesByDocID(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_inverse_test.go b/tests/integration/schema/migrations/query/with_inverse_test.go 
index f436c332c0..11c83c5fd4 100644 --- a/tests/integration/schema/migrations/query/with_inverse_test.go +++ b/tests/integration/schema/migrations/query/with_inverse_test.go @@ -50,7 +50,7 @@ func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreicdkt3m6mgwuoix7qyijvwxwtj3dlre4a4c6mdnqbucbndwuxjsvi", - DestinationSchemaVersionID: "bafkreibpaw4dxy6bvmuoyegm7bwxyi24nubozmukemwiour4v62kz5ffuu", + DestinationSchemaVersionID: "bafkreigijxrkfpadmnkpagokjdy6zpwtryad32m6nkgsqrd452kjlfp46e", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -66,8 +66,8 @@ func TestSchemaMigrationQueryInversesAcrossMultipleVersions(t *testing.T) { }, testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreibpaw4dxy6bvmuoyegm7bwxyi24nubozmukemwiour4v62kz5ffuu", - DestinationSchemaVersionID: "bafkreickm4zodm2muw5qcctmssht63g57u7kxujqyoax4zb5c42zs4pdh4", + SourceSchemaVersionID: "bafkreigijxrkfpadmnkpagokjdy6zpwtryad32m6nkgsqrd452kjlfp46e", + DestinationSchemaVersionID: "bafkreibtmdbc3nbdt74xdwvfrez53fxwyz6nh4b6ppwsrxiqpj5zpwgole", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go index b5e7bdde03..d7dc9f10dd 100644 --- a/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_schema_branch_test.go @@ -47,7 +47,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocOnOtherSchemaBranch(t *testing. // Register the migration on both nodes. 
LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", - DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + DestinationSchemaVersionID: "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_p2p_test.go b/tests/integration/schema/migrations/query/with_p2p_test.go index f8b0197d5d..39adf5a5a8 100644 --- a/tests/integration/schema/migrations/query/with_p2p_test.go +++ b/tests/integration/schema/migrations/query/with_p2p_test.go @@ -47,7 +47,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtOlderSchemaVersion(t *testing // Register the migration on both nodes. LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", - DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + DestinationSchemaVersionID: "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -146,7 +146,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes // Register the migration on both nodes. LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", - DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + DestinationSchemaVersionID: "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -163,8 +163,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchOlderSchemaVersion(t *tes testUtils.ConfigureMigration{ // Register the migration on both nodes. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", - DestinationSchemaVersionID: "bafkreidiohu3klvu4f2fdqcywtpqild4v7spsn7ivsjtg6sea6ome2oc4i", + SourceSchemaVersionID: "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba", + DestinationSchemaVersionID: "bafkreiglqiiz6j7d5dokcle6juoz26uixxggc5zawqkgwcivmenvhob5jy", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -254,7 +254,7 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtNewerSchemaVersion(t *testing // Register the migration on both nodes. LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce", - DestinationSchemaVersionID: "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm", + DestinationSchemaVersionID: "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -355,8 +355,8 @@ func TestSchemaMigrationQueryWithP2PReplicatedDocAtMuchNewerSchemaVersionWithSch // Register a migration from version 2 to version 3 on both nodes. // There is no migration from version 1 to 2, thus node 1 has no knowledge of schema version 2. 
LensConfig: client.LensConfig{ - SourceSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", - DestinationSchemaVersionID: "bafkreib65lld2tdyvlilbumlcccftqwvflpgutugghf5afrnlhdg7dgyv4", + SourceSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", + DestinationSchemaVersionID: "bafkreicpdtq27uclgcyeqivvyjvojtk57a573y3upfhi3lvteytktyhlva", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_restart_test.go b/tests/integration/schema/migrations/query/with_restart_test.go index f44264312c..4f2c0f4ec7 100644 --- a/tests/integration/schema/migrations/query/with_restart_test.go +++ b/tests/integration/schema/migrations/query/with_restart_test.go @@ -46,7 +46,7 @@ func TestSchemaMigrationQueryWithRestart(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -100,7 +100,7 @@ func TestSchemaMigrationQueryWithRestartAndMigrationBeforeSchemaPatch(t *testing testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_set_default_test.go b/tests/integration/schema/migrations/query/with_set_default_test.go index 17c147338c..170a861d89 100644 --- a/tests/integration/schema/migrations/query/with_set_default_test.go +++ 
b/tests/integration/schema/migrations/query/with_set_default_test.go @@ -22,7 +22,7 @@ import ( ) func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t *testing.T) { - schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" + schemaVersionID2 := "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba" test := testUtils.TestCase{ Description: "Test schema migration", @@ -84,7 +84,7 @@ func TestSchemaMigrationQuery_WithSetDefaultToLatest_AppliesForwardMigration(t * func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t *testing.T) { schemaVersionID1 := "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce" - schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" + schemaVersionID2 := "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba" test := testUtils.TestCase{ Description: "Test schema migration", @@ -159,7 +159,7 @@ func TestSchemaMigrationQuery_WithSetDefaultToOriginal_AppliesInverseMigration(t func TestSchemaMigrationQuery_WithSetDefaultToOriginalVersionThatDocWasCreatedAt_ClearsMigrations(t *testing.T) { schemaVersionID1 := "bafkreibpai5hfnalhtn5mgamzkgml4gwftow7pklmjcn6i4sqey6a5u5ce" - schemaVersionID2 := "bafkreidrbhf54zckhmchzw2ngbobfqtkt7sm6ihbliu2wtxesehz5g4xwm" + schemaVersionID2 := "bafkreif7z5sj2ehtmjenverki7c2hqfjgvbajqdlch6yk4kkbx7qvm2yba" test := testUtils.TestCase{ Description: "Test schema migration", diff --git a/tests/integration/schema/migrations/query/with_txn_test.go b/tests/integration/schema/migrations/query/with_txn_test.go index 880f9e01ed..79d2d9e825 100644 --- a/tests/integration/schema/migrations/query/with_txn_test.go +++ b/tests/integration/schema/migrations/query/with_txn_test.go @@ -48,7 +48,7 @@ func TestSchemaMigrationQueryWithTxn(t *testing.T) { TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - 
DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -110,7 +110,7 @@ func TestSchemaMigrationQueryWithTxnAndCommit(t *testing.T) { TransactionID: immutable.Some(0), LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git a/tests/integration/schema/migrations/query/with_update_test.go b/tests/integration/schema/migrations/query/with_update_test.go index 93a2586e25..bbeabcd062 100644 --- a/tests/integration/schema/migrations/query/with_update_test.go +++ b/tests/integration/schema/migrations/query/with_update_test.go @@ -46,7 +46,7 @@ func TestSchemaMigrationQueryWithUpdateRequest(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -130,7 +130,7 @@ func TestSchemaMigrationQueryWithMigrationRegisteredAfterUpdate(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { diff --git 
a/tests/integration/schema/migrations/simple_test.go b/tests/integration/schema/migrations/simple_test.go index a7826f5366..e36c9ec836 100644 --- a/tests/integration/schema/migrations/simple_test.go +++ b/tests/integration/schema/migrations/simple_test.go @@ -107,7 +107,7 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { testUtils.ConfigureMigration{ LensConfig: client.LensConfig{ SourceSchemaVersionID: "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe", - DestinationSchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + DestinationSchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Lens: model.Lens{ Lenses: []model.LensModule{ { @@ -158,7 +158,7 @@ func TestSchemaMigrationGetMigrationsReturnsMultiple(t *testing.T) { }, { ID: 4, - SchemaVersionID: "bafkreib5jaawobqqiu6frzacerlj55pxxxuql3igqj4ldmg2pgilke4bty", + SchemaVersionID: "bafkreiahhaeagyfsxaxmv3d665qvnbtyn3ts6jshhghy5bijwztbe7efpq", Sources: []any{ &client.CollectionSource{ SourceCollectionID: 3, diff --git a/tests/integration/schema/one_one_test.go b/tests/integration/schema/one_one_test.go index b5bc75bb48..8bc1e5a1fe 100644 --- a/tests/integration/schema/one_one_test.go +++ b/tests/integration/schema/one_one_test.go @@ -30,7 +30,9 @@ func TestSchemaOneOne_NoPrimary_Errors(t *testing.T) { owner: User } `, - ExpectedError: "relation missing field. 
Object: Dog, RelationName: dog_user", + // This error is dependent upon the order in which definitions are validated, so + // we only assert that the error is the correct type, and do not check the key-values + ExpectedError: "relation missing field", }, }, } diff --git a/tests/integration/schema/updates/add/field/create_update_test.go b/tests/integration/schema/updates/add/field/create_update_test.go index cd3a0b1267..53b892e0ae 100644 --- a/tests/integration/schema/updates/add/field/create_update_test.go +++ b/tests/integration/schema/updates/add/field/create_update_test.go @@ -18,7 +18,7 @@ import ( func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoin(t *testing.T) { initialSchemaVersionID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - updatedSchemaVersionID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + updatedSchemaVersionID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, version join", @@ -106,7 +106,7 @@ func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndVersionJoi func TestSchemaUpdatesAddFieldWithCreateWithUpdateAfterSchemaUpdateAndCommitQuery(t *testing.T) { initialSchemaVersionID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - updatedSchemaVersionID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + updatedSchemaVersionID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" test := testUtils.TestCase{ Description: "Test schema update, add field with update after schema update, commits query", diff --git a/tests/integration/schema/updates/add/field/simple_test.go b/tests/integration/schema/updates/add/field/simple_test.go index 80aaec32d6..d315791dfa 100644 --- a/tests/integration/schema/updates/add/field/simple_test.go +++ b/tests/integration/schema/updates/add/field/simple_test.go @@ -21,7 +21,7 @@ 
import ( func TestSchemaUpdatesAddFieldSimple(t *testing.T) { schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion2ID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -60,7 +60,7 @@ func TestSchemaUpdatesAddFieldSimple(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -116,7 +116,7 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_Errors(t *testing.T) { func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testing.T) { schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" + schemaVersion2ID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" test := testUtils.TestCase{ Description: "Test schema update, add field", @@ -149,7 +149,7 @@ func TestSchemaUpdates_AddFieldSimpleDoNotSetDefault_VersionIsQueryable(t *testi { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -362,7 +362,7 @@ func TestSchemaUpdatesAddFieldSimpleDuplicateOfExistingField(t *testing.T) { { "op": "add", "path": "/Users/Fields/-", "value": {"Name": "name", "Kind": 11} } ] `, - ExpectedError: "duplicate field. Name: name", + ExpectedError: "mutating an existing field is not supported. 
ProposedName: name", }, }, } diff --git a/tests/integration/schema/updates/add/simple_test.go b/tests/integration/schema/updates/add/simple_test.go index 88d36680b0..65d04a7af6 100644 --- a/tests/integration/schema/updates/add/simple_test.go +++ b/tests/integration/schema/updates/add/simple_test.go @@ -33,7 +33,7 @@ func TestSchemaUpdatesAddSimpleErrorsAddingSchema(t *testing.T) { { "op": "add", "path": "/-", "value": {"Name": "books"} } ] `, - ExpectedError: "adding collections via patch is not supported. Name: books", + ExpectedError: "adding schema via patch is not supported. Name: books", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/schema/updates/copy/field/simple_test.go b/tests/integration/schema/updates/copy/field/simple_test.go index a2c631a515..5baf640ab8 100644 --- a/tests/integration/schema/updates/copy/field/simple_test.go +++ b/tests/integration/schema/updates/copy/field/simple_test.go @@ -34,7 +34,7 @@ func TestSchemaUpdatesCopyFieldErrors(t *testing.T) { { "op": "copy", "from": "/Users/Fields/1", "path": "/Users/Fields/2" } ] `, - ExpectedError: "duplicate field. Name: email", + ExpectedError: "moving fields is not currently supported. Name: email", }, testUtils.Request{ Request: `query { diff --git a/tests/integration/schema/updates/copy/simple_test.go b/tests/integration/schema/updates/copy/simple_test.go index cdda8abaf8..0f5a691149 100644 --- a/tests/integration/schema/updates/copy/simple_test.go +++ b/tests/integration/schema/updates/copy/simple_test.go @@ -38,7 +38,7 @@ func TestSchemaUpdatesCopyCollectionWithRemoveIDAndReplaceName(t *testing.T) { { "op": "replace", "path": "/Book/Name", "value": "Book" } ] `, - ExpectedError: "adding collections via patch is not supported. Name: Book", + ExpectedError: "adding schema via patch is not supported. 
Name: Book", }, }, } diff --git a/tests/integration/schema/updates/remove/simple_test.go b/tests/integration/schema/updates/remove/simple_test.go index e9e4f139ae..d0343484e5 100644 --- a/tests/integration/schema/updates/remove/simple_test.go +++ b/tests/integration/schema/updates/remove/simple_test.go @@ -34,7 +34,7 @@ func TestSchemaUpdatesRemoveCollectionNameErrors(t *testing.T) { { "op": "remove", "path": "/Users/Name" } ] `, - ExpectedError: "schema name can't be empty", + ExpectedError: "SchemaRoot does not match existing. Name: ", }, }, } @@ -118,7 +118,7 @@ func TestSchemaUpdatesRemoveSchemaNameErrors(t *testing.T) { { "op": "remove", "path": "/Users/Name" } ] `, - ExpectedError: "schema name can't be empty", + ExpectedError: "SchemaRoot does not match existing. Name: ", }, }, } diff --git a/tests/integration/schema/updates/replace/simple_test.go b/tests/integration/schema/updates/replace/simple_test.go index 722ff36f9b..7e403f7d03 100644 --- a/tests/integration/schema/updates/replace/simple_test.go +++ b/tests/integration/schema/updates/replace/simple_test.go @@ -44,7 +44,7 @@ func TestSchemaUpdatesReplaceCollectionErrors(t *testing.T) { // WARNING: An error is still expected if/when we allow the adding of collections, as this also // implies that the "Users" collection is to be deleted. Only once we support the adding *and* // removal of collections should this not error. - ExpectedError: "adding collections via patch is not supported. Name: Book", + ExpectedError: "adding schema via patch is not supported. 
Name: Book", }, }, } diff --git a/tests/integration/schema/updates/with_schema_branch_test.go b/tests/integration/schema/updates/with_schema_branch_test.go index d8f7d1afc2..58759f3edd 100644 --- a/tests/integration/schema/updates/with_schema_branch_test.go +++ b/tests/integration/schema/updates/with_schema_branch_test.go @@ -21,8 +21,8 @@ import ( func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" - schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" + schemaVersion2ID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" + schemaVersion3ID := "bafkreifc46y7pk2xfwc3nc442r7iqf6cjixxerxrrnrsouky544gmz4zve" test := testUtils.TestCase{ Description: "Test schema update, with branching schema", @@ -74,7 +74,7 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -112,7 +112,7 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -170,9 +170,9 @@ func TestSchemaUpdates_WithBranchingSchema(t *testing.T) { func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" - schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" - schemaVersion4ID := "bafkreid4ulxeclzgpzhznge7zdin6docxvklugvr6gt4jxfyanz5i2r2hu" + schemaVersion2ID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" + schemaVersion3ID := "bafkreifc46y7pk2xfwc3nc442r7iqf6cjixxerxrrnrsouky544gmz4zve" + schemaVersion4ID := 
"bafkreic2heai3vgufxcxs6bfvil2oyz27w3bzkwoqehjevlnkewq3ffp4e" test := testUtils.TestCase{ Description: "Test schema update, with patch on branching schema", @@ -234,7 +234,7 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", @@ -308,8 +308,8 @@ func TestSchemaUpdates_WithPatchOnBranchedSchema(t *testing.T) { func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *testing.T) { schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" - schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" + schemaVersion2ID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" + schemaVersion3ID := "bafkreifc46y7pk2xfwc3nc442r7iqf6cjixxerxrrnrsouky544gmz4zve" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches", @@ -404,9 +404,9 @@ func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranch(t *tes func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPatch(t *testing.T) { schemaVersion1ID := "bafkreia3o3cetvcnnxyu5spucimoos77ifungfmacxdkva4zah2is3aooe" - schemaVersion2ID := "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4" - schemaVersion3ID := "bafkreifswbi23wxvq2zpqnoldolsxk2fhtj5t6rs3pidil3j6tybc62q3m" - schemaVersion4ID := "bafkreidjuyxhakc5yx7fucunoxijnfjvgqohf4sjoryzf27mqxidh37kne" + schemaVersion2ID := "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe" + schemaVersion3ID := "bafkreifc46y7pk2xfwc3nc442r7iqf6cjixxerxrrnrsouky544gmz4zve" + schemaVersion4ID := "bafkreifdkkauc4b4rkazmzijiu2nxlikqatxa5zbmjc4sn3wrtlcqqcrt4" test := testUtils.TestCase{ Description: "Test schema update, with branching schema toggling between branches then patch", @@ -472,7 +472,7 @@ 
func TestSchemaUpdates_WithBranchingSchemaAndSetActiveSchemaToOtherBranchThenPat { Name: "_docID", Kind: client.FieldKind_DocID, - Typ: client.LWW_REGISTER, + Typ: client.NONE_CRDT, }, { Name: "name", diff --git a/tests/integration/schema/with_update_set_default_test.go b/tests/integration/schema/with_update_set_default_test.go index f46e0540e3..22f05a4d73 100644 --- a/tests/integration/schema/with_update_set_default_test.go +++ b/tests/integration/schema/with_update_set_default_test.go @@ -129,7 +129,7 @@ func TestSchema_WithUpdateAndSetDefaultVersionToNew_AllowsQueryingOfNewField(t * SetAsDefaultVersion: immutable.Some(false), }, testUtils.SetActiveSchemaVersion{ - SchemaVersionID: "bafkreibz4g6rkxanzn6ro74ezmbwoe5hvcguwvi34judrk2kfuqqtk5ak4", + SchemaVersionID: "bafkreidt4i22v4bzga3aezlcxsrfbvuhzcbqo5bnfe2x2dgkpz3eds2afe", }, testUtils.Request{ Request: `query {