diff --git a/db/gen/coredb/batch.go b/db/gen/coredb/batch.go
index ba81043f7..9c97070f7 100644
--- a/db/gen/coredb/batch.go
+++ b/db/gen/coredb/batch.go
@@ -1712,7 +1712,7 @@ func (b *GetGalleryByIdBatchBatchResults) Close() error {
 }
 
 const getGalleryTokenMediasByGalleryIDBatch = `-- name: GetGalleryTokenMediasByGalleryIDBatch :batchmany
-select tm.id, tm.created_at, tm.last_updated, tm.version, tm.contract_id, tm.token_id, tm.chain, tm.active, tm.metadata, tm.media, tm.name__deprecated, tm.description__deprecated, tm.processing_job_id, tm.deleted
+select tm.id, tm.created_at, tm.last_updated, tm.version, tm.contract_id__deprecated, tm.token_id__deprecated, tm.chain__deprecated, tm.active, tm.metadata__deprecated, tm.media, tm.name__deprecated, tm.description__deprecated, tm.processing_job_id, tm.deleted
 from galleries g, collections c, tokens t, token_medias tm
 where g.id = $1
@@ -1772,11 +1772,11 @@ func (b *GetGalleryTokenMediasByGalleryIDBatchBatchResults) Query(f func(int, []
 			&i.CreatedAt,
 			&i.LastUpdated,
 			&i.Version,
-			&i.ContractID,
-			&i.TokenID,
-			&i.Chain,
+			&i.ContractIDDeprecated,
+			&i.TokenIDDeprecated,
+			&i.ChainDeprecated,
 			&i.Active,
-			&i.Metadata,
+			&i.MetadataDeprecated,
 			&i.Media,
 			&i.NameDeprecated,
 			&i.DescriptionDeprecated,
@@ -1801,7 +1801,7 @@ func (b *GetGalleryTokenMediasByGalleryIDBatchBatchResults) Close() error {
 }
 
 const getMediaByTokenIDIgnoringStatus = `-- name: GetMediaByTokenIDIgnoringStatus :batchone
-select m.id, m.created_at, m.last_updated, m.version, m.contract_id, m.token_id, m.chain, m.active, m.metadata, m.media, m.name__deprecated, m.description__deprecated, m.processing_job_id, m.deleted
+select m.id, m.created_at, m.last_updated, m.version, m.contract_id__deprecated, m.token_id__deprecated, m.chain__deprecated, m.active, m.metadata__deprecated, m.media, m.name__deprecated, m.description__deprecated, m.processing_job_id, m.deleted
 from token_medias m
 where m.id = (select token_media_id from tokens where tokens.id = $1) and not m.deleted
 `
@@ -1840,11 +1840,11 @@ func (b *GetMediaByTokenIDIgnoringStatusBatchResults) QueryRow(f func(int, Token
 			&i.CreatedAt,
 			&i.LastUpdated,
 			&i.Version,
-			&i.ContractID,
-			&i.TokenID,
-			&i.Chain,
+			&i.ContractIDDeprecated,
+			&i.TokenIDDeprecated,
+			&i.ChainDeprecated,
 			&i.Active,
-			&i.Metadata,
+			&i.MetadataDeprecated,
 			&i.Media,
 			&i.NameDeprecated,
 			&i.DescriptionDeprecated,
diff --git a/db/gen/coredb/models_gen.go b/db/gen/coredb/models_gen.go
index d8e1c722f..a6fc361cf 100644
--- a/db/gen/coredb/models_gen.go
+++ b/db/gen/coredb/models_gen.go
@@ -532,26 +532,28 @@ type TokenDefinition struct {
 	ExternalUrl           sql.NullString        `json:"external_url"`
 	Chain                 persist.Chain         `json:"chain"`
 	IsProviderMarkedSpam  bool                  `json:"is_provider_marked_spam"`
+	Metadata              persist.TokenMetadata `json:"metadata"`
 	FallbackMedia         persist.FallbackMedia `json:"fallback_media"`
+	ContractAddress       persist.Address       `json:"contract_address"`
 	ContractID            persist.DBID          `json:"contract_id"`
 	TokenMediaID          persist.DBID          `json:"token_media_id"`
 }
 
 type TokenMedia struct {
-	ID                    persist.DBID          `json:"id"`
-	CreatedAt             time.Time             `json:"created_at"`
-	LastUpdated           time.Time             `json:"last_updated"`
-	Version               int32                 `json:"version"`
-	ContractID            persist.DBID          `json:"contract_id"`
-	TokenID               persist.TokenID       `json:"token_id"`
-	Chain                 persist.Chain         `json:"chain"`
-	Active                bool                  `json:"active"`
-	Metadata              persist.TokenMetadata `json:"metadata"`
-	Media                 persist.Media         `json:"media"`
-	NameDeprecated        string                `json:"name__deprecated"`
-	DescriptionDeprecated string                `json:"description__deprecated"`
-	ProcessingJobID       persist.DBID          `json:"processing_job_id"`
-	Deleted               bool                  `json:"deleted"`
+	ID                    persist.DBID  `json:"id"`
+	CreatedAt             time.Time     `json:"created_at"`
+	LastUpdated           time.Time     `json:"last_updated"`
+	Version               int32         `json:"version"`
+	ContractIDDeprecated  string        `json:"contract_id__deprecated"`
+	TokenIDDeprecated     string        `json:"token_id__deprecated"`
+	ChainDeprecated       int32         `json:"chain__deprecated"`
+	Active                bool          `json:"active"`
+	MetadataDeprecated    pgtype.JSONB  `json:"metadata__deprecated"`
+	Media                 persist.Media `json:"media"`
+	NameDeprecated        string        `json:"name__deprecated"`
+	DescriptionDeprecated string        `json:"description__deprecated"`
+	ProcessingJobID       persist.DBID  `json:"processing_job_id"`
+	Deleted               bool          `json:"deleted"`
 }
 
 type TokenMediasActive struct {
diff --git a/db/gen/coredb/query.sql.go b/db/gen/coredb/query.sql.go
index 1d14f6c30..dfb6a2472 100644
--- a/db/gen/coredb/query.sql.go
+++ b/db/gen/coredb/query.sql.go
@@ -1846,36 +1846,6 @@ func (q *Queries) GetContractByID(ctx context.Context, id persist.DBID) (Contrac
 	return i, err
 }
 
-const getContractByTokenDefinitionId = `-- name: GetContractByTokenDefinitionId :one
-select id, deleted, version, created_at, last_updated, name, symbol, address, creator_address, chain, profile_banner_url, profile_image_url, badge_url, description, owner_address, is_provider_marked_spam, parent_id, override_creator_user_id from contracts where contracts.id = (select contract_id from token_definitions where token_definitions.id = $1 and not token_definitions.deleted) and not contracts.deleted = false
-`
-
-func (q *Queries) GetContractByTokenDefinitionId(ctx context.Context, id persist.DBID) (Contract, error) {
-	row := q.db.QueryRow(ctx, getContractByTokenDefinitionId, id)
-	var i Contract
-	err := row.Scan(
-		&i.ID,
-		&i.Deleted,
-		&i.Version,
-		&i.CreatedAt,
-		&i.LastUpdated,
-		&i.Name,
-		&i.Symbol,
-		&i.Address,
-		&i.CreatorAddress,
-		&i.Chain,
-		&i.ProfileBannerUrl,
-		&i.ProfileImageUrl,
-		&i.BadgeUrl,
-		&i.Description,
-		&i.OwnerAddress,
-		&i.IsProviderMarkedSpam,
-		&i.ParentID,
-		&i.OverrideCreatorUserID,
-	)
-	return i, err
-}
-
 const getContractCreatorsByContractIDs = `-- name: GetContractCreatorsByContractIDs :many
 with contract_creators as (
     select c.id as contract_id,
@@ -2758,62 +2728,38 @@ func (q *Queries) GetLastFeedEventForUser(ctx context.Context, arg GetLastFeedEv
 }
 
 const getMediaByUserTokenIdentifiers = `-- name: GetMediaByUserTokenIdentifiers :one
-with contract as (
-    select id, deleted, version, created_at, last_updated, name, symbol, address, creator_address, chain, profile_banner_url, profile_image_url, badge_url, description, owner_address, is_provider_marked_spam, parent_id, override_creator_user_id from contracts where contracts.chain = $1 and contracts.address = $2 and not contracts.deleted
-),
-matching_media as (
-    select token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted
-    from token_medias, contract
-    where token_medias.contract_id = contract.id and token_medias.chain = $1 and token_medias.token_id = $3 and not token_medias.deleted
-    order by token_medias.active desc, token_medias.last_updated desc
-    limit 1
-),
-matched_token(id) as (
-    select tokens.id
-    from tokens, contract, matching_media
-    where tokens.contract = contract.id and tokens.chain = $1 and tokens.token_id = $3 and not tokens.deleted
-    order by tokens.owner_user_id = $4 desc, tokens.token_media_id = matching_media.id desc, tokens.last_updated desc
-    limit 1
-)
-select token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, (select id from matched_token) token_instance_id from matching_media token_medias
+select token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted
+from token_medias
+join token_definitions on token_definitions.token_media_id = token_medias.id
+where (token_definitions.chain, token_definitions.contract_address, token_definitions.token_id) = ($1, $2, $3)
+  and not token_definitions.deleted
+order by token_medias.active desc, token_medias.last_updated desc
 `
 
 type GetMediaByUserTokenIdentifiersParams struct {
-	Chain   persist.Chain   `json:"chain"`
-	Address persist.Address `json:"address"`
-	TokenID persist.TokenID `json:"token_id"`
-	UserID  persist.DBID    `json:"user_id"`
-}
-
-type GetMediaByUserTokenIdentifiersRow struct {
-	TokenMedia      TokenMedia   `json:"tokenmedia"`
-	TokenInstanceID persist.DBID `json:"token_instance_id"`
+	Chain           persist.Chain   `json:"chain"`
+	ContractAddress persist.Address `json:"contract_address"`
+	TokenUd         sql.NullInt32   `json:"token_ud"`
 }
 
-func (q *Queries) GetMediaByUserTokenIdentifiers(ctx context.Context, arg GetMediaByUserTokenIdentifiersParams) (GetMediaByUserTokenIdentifiersRow, error) {
-	row := q.db.QueryRow(ctx, getMediaByUserTokenIdentifiers,
-		arg.Chain,
-		arg.Address,
-		arg.TokenID,
-		arg.UserID,
-	)
-	var i GetMediaByUserTokenIdentifiersRow
+func (q *Queries) GetMediaByUserTokenIdentifiers(ctx context.Context, arg GetMediaByUserTokenIdentifiersParams) (TokenMedia, error) {
+	row := q.db.QueryRow(ctx, getMediaByUserTokenIdentifiers, arg.Chain, arg.ContractAddress, arg.TokenUd)
+	var i TokenMedia
 	err := row.Scan(
-		&i.TokenMedia.ID,
-		&i.TokenMedia.CreatedAt,
-		&i.TokenMedia.LastUpdated,
-		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
-		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
-		&i.TokenMedia.Media,
-		&i.TokenMedia.NameDeprecated,
-		&i.TokenMedia.DescriptionDeprecated,
-		&i.TokenMedia.ProcessingJobID,
-		&i.TokenMedia.Deleted,
-		&i.TokenInstanceID,
+		&i.ID,
+		&i.CreatedAt,
+		&i.LastUpdated,
+		&i.Version,
+		&i.ContractIDDeprecated,
+		&i.TokenIDDeprecated,
+		&i.ChainDeprecated,
+		&i.Active,
+		&i.MetadataDeprecated,
+		&i.Media,
+		&i.NameDeprecated,
+		&i.DescriptionDeprecated,
+		&i.ProcessingJobID,
+		&i.Deleted,
 	)
 	return i, err
 }
@@ -3261,7 +3207,7 @@ func (q *Queries) GetPostsByIds(ctx context.Context, postIds []string) ([]Post,
 }
 
 const getPotentialENSProfileImageByUserId = `-- name: GetPotentialENSProfileImageByUserId :one
-select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, wallets.id, wallets.created_at, wallets.last_updated, wallets.deleted, wallets.version, wallets.address, wallets.wallet_type, wallets.chain
+select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, wallets.id, wallets.created_at, wallets.last_updated, wallets.deleted, wallets.version, wallets.address, wallets.wallet_type, wallets.chain
 from token_definitions, tokens, contracts, users, token_medias, wallets, unnest(tokens.owned_by_wallets) tw(id)
 where contracts.address = $1
     and contracts.chain = $2
@@ -3309,18 +3255,20 @@ func (q *Queries) GetPotentialENSProfileImageByUserId(ctx context.Context, arg G
 		&i.TokenDefinition.ExternalUrl,
 		&i.TokenDefinition.Chain,
 		&i.TokenDefinition.IsProviderMarkedSpam,
+		&i.TokenDefinition.Metadata,
 		&i.TokenDefinition.FallbackMedia,
+		&i.TokenDefinition.ContractAddress,
 		&i.TokenDefinition.ContractID,
 		&i.TokenDefinition.TokenMediaID,
 		&i.TokenMedia.ID,
 		&i.TokenMedia.CreatedAt,
 		&i.TokenMedia.LastUpdated,
 		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
+		&i.TokenMedia.ContractIDDeprecated,
+		&i.TokenMedia.TokenIDDeprecated,
+		&i.TokenMedia.ChainDeprecated,
 		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
+		&i.TokenMedia.MetadataDeprecated,
 		&i.TokenMedia.Media,
 		&i.TokenMedia.NameDeprecated,
 		&i.TokenMedia.DescriptionDeprecated,
@@ -3995,70 +3943,8 @@ func (q *Queries) GetTokenByUserTokenIdentifiers(ctx context.Context, arg GetTok
 	return i, err
 }
 
-const getTokenDefinitionAndContractByTokenIdentifiers = `-- name: GetTokenDefinitionAndContractByTokenIdentifiers :one
-select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
-from token_definitions, contracts
-where contracts.chain = $1
-    and contracts.address = $2
-    and token_definitions.token_id = $3
-    and not token_definitions.deleted
-    and not token_medias.deleted
-    and not contracts.deleted
-`
-
-type GetTokenDefinitionAndContractByTokenIdentifiersParams struct {
-	Chain           persist.Chain   `json:"chain"`
-	ContractAddress persist.Address `json:"contract_address"`
-	TokenID         persist.TokenID `json:"token_id"`
-}
-
-type GetTokenDefinitionAndContractByTokenIdentifiersRow struct {
-	TokenDefinition TokenDefinition `json:"tokendefinition"`
-	Contract        Contract        `json:"contract"`
-}
-
-func (q *Queries) GetTokenDefinitionAndContractByTokenIdentifiers(ctx context.Context, arg GetTokenDefinitionAndContractByTokenIdentifiersParams) (GetTokenDefinitionAndContractByTokenIdentifiersRow, error) {
-	row := q.db.QueryRow(ctx, getTokenDefinitionAndContractByTokenIdentifiers, arg.Chain, arg.ContractAddress, arg.TokenID)
-	var i GetTokenDefinitionAndContractByTokenIdentifiersRow
-	err := row.Scan(
-		&i.TokenDefinition.ID,
-		&i.TokenDefinition.CreatedAt,
-		&i.TokenDefinition.LastUpdated,
-		&i.TokenDefinition.Deleted,
-		&i.TokenDefinition.Name,
-		&i.TokenDefinition.Description,
-		&i.TokenDefinition.TokenType,
-		&i.TokenDefinition.TokenID,
-		&i.TokenDefinition.ExternalUrl,
-		&i.TokenDefinition.Chain,
-		&i.TokenDefinition.IsProviderMarkedSpam,
-		&i.TokenDefinition.FallbackMedia,
-		&i.TokenDefinition.ContractID,
-		&i.TokenDefinition.TokenMediaID,
-		&i.Contract.ID,
-		&i.Contract.Deleted,
-		&i.Contract.Version,
-		&i.Contract.CreatedAt,
-		&i.Contract.LastUpdated,
-		&i.Contract.Name,
-		&i.Contract.Symbol,
-		&i.Contract.Address,
-		&i.Contract.CreatorAddress,
-		&i.Contract.Chain,
-		&i.Contract.ProfileBannerUrl,
-		&i.Contract.ProfileImageUrl,
-		&i.Contract.BadgeUrl,
-		&i.Contract.Description,
-		&i.Contract.OwnerAddress,
-		&i.Contract.IsProviderMarkedSpam,
-		&i.Contract.ParentID,
-		&i.Contract.OverrideCreatorUserID,
-	)
-	return i, err
-}
-
 const getTokenDefinitionAndMediaByTokenIdentifiers = `-- name: GetTokenDefinitionAndMediaByTokenIdentifiers :one
-select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted
+select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted
 from token_definitions, token_medias
 where token_definitions.chain = $1
     and contract_id = (
@@ -4098,18 +3984,20 @@ func (q *Queries) GetTokenDefinitionAndMediaByTokenIdentifiers(ctx context.Conte
 		&i.TokenDefinition.ExternalUrl,
 		&i.TokenDefinition.Chain,
 		&i.TokenDefinition.IsProviderMarkedSpam,
+		&i.TokenDefinition.Metadata,
 		&i.TokenDefinition.FallbackMedia,
+		&i.TokenDefinition.ContractAddress,
 		&i.TokenDefinition.ContractID,
 		&i.TokenDefinition.TokenMediaID,
 		&i.TokenMedia.ID,
 		&i.TokenMedia.CreatedAt,
 		&i.TokenMedia.LastUpdated,
 		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
+		&i.TokenMedia.ContractIDDeprecated,
+		&i.TokenMedia.TokenIDDeprecated,
+		&i.TokenMedia.ChainDeprecated,
 		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
+		&i.TokenMedia.MetadataDeprecated,
 		&i.TokenMedia.Media,
 		&i.TokenMedia.NameDeprecated,
 		&i.TokenMedia.DescriptionDeprecated,
@@ -4119,12 +4007,39 @@ func (q *Queries) GetTokenDefinitionAndMediaByTokenIdentifiers(ctx context.Conte
 	return i, err
 }
 
+const getTokenDefinitionById = `-- name: GetTokenDefinitionById :one
+select id, created_at, last_updated, deleted, name, description, token_type, token_id, external_url, chain, is_provider_marked_spam, metadata, fallback_media, contract_address, contract_id, token_media_id from token_definitions where id = $1 and not deleted
+`
+
+func (q *Queries) GetTokenDefinitionById(ctx context.Context, id persist.DBID) (TokenDefinition, error) {
+	row := q.db.QueryRow(ctx, getTokenDefinitionById, id)
+	var i TokenDefinition
+	err := row.Scan(
+		&i.ID,
+		&i.CreatedAt,
+		&i.LastUpdated,
+		&i.Deleted,
+		&i.Name,
+		&i.Description,
+		&i.TokenType,
+		&i.TokenID,
+		&i.ExternalUrl,
+		&i.Chain,
+		&i.IsProviderMarkedSpam,
+		&i.Metadata,
+		&i.FallbackMedia,
+		&i.ContractAddress,
+		&i.ContractID,
+		&i.TokenMediaID,
+	)
+	return i, err
+}
+
 const getTokenDefinitionByTokenDbid = `-- name: GetTokenDefinitionByTokenDbid :one
-select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id
+select token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id
 from token_definitions, tokens
 where token_definitions.id = tokens.token_definition_id
     and tokens.id = $1
-    and tokens.displayable
     and not tokens.deleted
     and not token_definitions.deleted
 `
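[Editor's note: illustrative sketch, not part of the generated diff. With metadata moved onto token_definitions (TokenDefinition.Metadata) and token_medias.metadata deprecated, a caller that previously read TokenMedia.Metadata would instead go through a token definition lookup such as the new GetTokenDefinitionById query above. A minimal sketch, assuming the coredb package regenerated by this diff and the go-gallery persist package; the helper name, package wiring, and import paths are hypothetical.]

	// metadataForTokenDefinition reads token metadata from token_definitions,
	// which this change makes the canonical home for metadata.
	// Assumes: q was built from the regenerated coredb package above.
	func metadataForTokenDefinition(ctx context.Context, q *coredb.Queries, id persist.DBID) (persist.TokenMetadata, error) {
		def, err := q.GetTokenDefinitionById(ctx, id) // generated above in query.sql.go
		if err != nil {
			return persist.TokenMetadata{}, err
		}
		return def.Metadata, nil
	}
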
@@ -4144,7 +4059,9 @@ func (q *Queries) GetTokenDefinitionByTokenDbid(ctx context.Context, id persist.
 		&i.ExternalUrl,
 		&i.Chain,
 		&i.IsProviderMarkedSpam,
+		&i.Metadata,
 		&i.FallbackMedia,
+		&i.ContractAddress,
 		&i.ContractID,
 		&i.TokenMediaID,
 	)
@@ -4152,7 +4069,7 @@ func (q *Queries) GetTokenDefinitionByTokenDbid(ctx context.Context, id persist.
 }
 
 const getTokenDefinitionByTokenIdentifiers = `-- name: GetTokenDefinitionByTokenIdentifiers :one
-select id, created_at, last_updated, deleted, name, description, token_type, token_id, external_url, chain, is_provider_marked_spam, fallback_media, contract_id, token_media_id
+select id, created_at, last_updated, deleted, name, description, token_type, token_id, external_url, chain, is_provider_marked_spam, metadata, fallback_media, contract_address, contract_id, token_media_id
 from token_definitions
 where token_definitions.chain = $1
     and contract_id = (
@@ -4185,7 +4102,9 @@ func (q *Queries) GetTokenDefinitionByTokenIdentifiers(ctx context.Context, arg 
 		&i.ExternalUrl,
 		&i.Chain,
 		&i.IsProviderMarkedSpam,
+		&i.Metadata,
 		&i.FallbackMedia,
+		&i.ContractAddress,
 		&i.ContractID,
 		&i.TokenMediaID,
 	)
@@ -4193,7 +4112,7 @@ func (q *Queries) GetTokenDefinitionByTokenIdentifiers(ctx context.Context, arg 
 }
 
 const getTokenFullDetailsByContractId = `-- name: GetTokenFullDetailsByContractId :many
-select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
+select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
 from tokens
 join token_definitions on tokens.token_definition_id = token_definitions.id
 join contracts on token_definitions.contract_id = contracts.id
@@ -4258,18 +4177,20 @@ func (q *Queries) GetTokenFullDetailsByContractId(ctx context.Context, id persis
 		&i.TokenDefinition.ExternalUrl,
 		&i.TokenDefinition.Chain,
 		&i.TokenDefinition.IsProviderMarkedSpam,
+		&i.TokenDefinition.Metadata,
 		&i.TokenDefinition.FallbackMedia,
+		&i.TokenDefinition.ContractAddress,
 		&i.TokenDefinition.ContractID,
 		&i.TokenDefinition.TokenMediaID,
 		&i.TokenMedia.ID,
 		&i.TokenMedia.CreatedAt,
 		&i.TokenMedia.LastUpdated,
 		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
+		&i.TokenMedia.ContractIDDeprecated,
+		&i.TokenMedia.TokenIDDeprecated,
+		&i.TokenMedia.ChainDeprecated,
 		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
+		&i.TokenMedia.MetadataDeprecated,
 		&i.TokenMedia.Media,
 		&i.TokenMedia.NameDeprecated,
 		&i.TokenMedia.DescriptionDeprecated,
@@ -4305,7 +4226,7 @@ func (q *Queries) GetTokenFullDetailsByContractId(ctx context.Context, id persis
 }
 
 const getTokenFullDetailsByTokenDbid = `-- name: GetTokenFullDetailsByTokenDbid :one
-select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
+select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
 from tokens
 join token_definitions on tokens.token_definition_id = token_definitions.id
 join contracts on token_definitions.contract_id = contracts.id
@@ -4363,18 +4284,20 @@ func (q *Queries) GetTokenFullDetailsByTokenDbid(ctx context.Context, id persist
 		&i.TokenDefinition.ExternalUrl,
 		&i.TokenDefinition.Chain,
 		&i.TokenDefinition.IsProviderMarkedSpam,
+		&i.TokenDefinition.Metadata,
 		&i.TokenDefinition.FallbackMedia,
+		&i.TokenDefinition.ContractAddress,
 		&i.TokenDefinition.ContractID,
 		&i.TokenDefinition.TokenMediaID,
 		&i.TokenMedia.ID,
 		&i.TokenMedia.CreatedAt,
 		&i.TokenMedia.LastUpdated,
 		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
+		&i.TokenMedia.ContractIDDeprecated,
+		&i.TokenMedia.TokenIDDeprecated,
+		&i.TokenMedia.ChainDeprecated,
 		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
+		&i.TokenMedia.MetadataDeprecated,
 		&i.TokenMedia.Media,
 		&i.TokenMedia.NameDeprecated,
 		&i.TokenMedia.DescriptionDeprecated,
@@ -4402,8 +4325,122 @@ func (q *Queries) GetTokenFullDetailsByTokenDbid(ctx context.Context, id persist
 	return i, err
 }
 
+const getTokenFullDetailsByTokenDefinitionId = `-- name: GetTokenFullDetailsByTokenDefinitionId :many
+select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
+from tokens
+join token_definitions on tokens.token_definition_id = token_definitions.id
+join contracts on token_definitions.contract_id = contracts.id
+left join token_medias on token_definitions.token_media_id = token_medias.id
+where token_definitions.id = $1 and tokens.displayable and not tokens.deleted and not token_definitions.deleted and not contracts.deleted
+order by tokens.block_number desc
+`
+
+type GetTokenFullDetailsByTokenDefinitionIdRow struct {
+	Token           Token           `json:"token"`
+	TokenDefinition TokenDefinition `json:"tokendefinition"`
+	TokenMedia      TokenMedia      `json:"tokenmedia"`
+	Contract        Contract        `json:"contract"`
+}
+
+func (q *Queries) GetTokenFullDetailsByTokenDefinitionId(ctx context.Context, id persist.DBID) ([]GetTokenFullDetailsByTokenDefinitionIdRow, error) {
+	rows, err := q.db.Query(ctx, getTokenFullDetailsByTokenDefinitionId, id)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	var items []GetTokenFullDetailsByTokenDefinitionIdRow
+	for rows.Next() {
+		var i GetTokenFullDetailsByTokenDefinitionIdRow
+		if err := rows.Scan(
+			&i.Token.ID,
+			&i.Token.Deleted,
+			&i.Token.Version,
+			&i.Token.CreatedAt,
+			&i.Token.LastUpdated,
+			&i.Token.NameDeprecated,
+			&i.Token.DescriptionDeprecated,
+			&i.Token.CollectorsNote,
+			&i.Token.TokenUriDeprecated,
+			&i.Token.TokenTypeDeprecated,
+			&i.Token.TokenID,
+			&i.Token.Quantity,
+			&i.Token.OwnershipHistoryDeprecated,
+			&i.Token.ExternalUrlDeprecated,
+			&i.Token.BlockNumber,
+			&i.Token.OwnerUserID,
+			&i.Token.OwnedByWallets,
+			&i.Token.Chain,
+			&i.Token.ContractID,
+			&i.Token.IsUserMarkedSpam,
+			&i.Token.IsProviderMarkedSpamDeprecated,
+			&i.Token.LastSynced,
+			&i.Token.FallbackMediaDeprecated,
+			&i.Token.TokenMediaIDDeprecated,
+			&i.Token.IsCreatorToken,
+			&i.Token.IsHolderToken,
+			&i.Token.Displayable,
+			&i.Token.TokenDefinitionID,
+			&i.TokenDefinition.ID,
+			&i.TokenDefinition.CreatedAt,
+			&i.TokenDefinition.LastUpdated,
+			&i.TokenDefinition.Deleted,
+			&i.TokenDefinition.Name,
+			&i.TokenDefinition.Description,
+			&i.TokenDefinition.TokenType,
+			&i.TokenDefinition.TokenID,
+			&i.TokenDefinition.ExternalUrl,
+			&i.TokenDefinition.Chain,
+			&i.TokenDefinition.IsProviderMarkedSpam,
+			&i.TokenDefinition.Metadata,
+			&i.TokenDefinition.FallbackMedia,
+			&i.TokenDefinition.ContractAddress,
+			&i.TokenDefinition.ContractID,
+			&i.TokenDefinition.TokenMediaID,
+			&i.TokenMedia.ID,
+			&i.TokenMedia.CreatedAt,
+			&i.TokenMedia.LastUpdated,
+			&i.TokenMedia.Version,
+			&i.TokenMedia.ContractIDDeprecated,
+			&i.TokenMedia.TokenIDDeprecated,
+			&i.TokenMedia.ChainDeprecated,
+			&i.TokenMedia.Active,
+			&i.TokenMedia.MetadataDeprecated,
+			&i.TokenMedia.Media,
+			&i.TokenMedia.NameDeprecated,
+			&i.TokenMedia.DescriptionDeprecated,
+			&i.TokenMedia.ProcessingJobID,
+			&i.TokenMedia.Deleted,
+			&i.Contract.ID,
+			&i.Contract.Deleted,
+			&i.Contract.Version,
+			&i.Contract.CreatedAt,
+			&i.Contract.LastUpdated,
+			&i.Contract.Name,
+			&i.Contract.Symbol,
+			&i.Contract.Address,
+			&i.Contract.CreatorAddress,
+			&i.Contract.Chain,
+			&i.Contract.ProfileBannerUrl,
+			&i.Contract.ProfileImageUrl,
+			&i.Contract.BadgeUrl,
+			&i.Contract.Description,
+			&i.Contract.OwnerAddress,
+			&i.Contract.IsProviderMarkedSpam,
+			&i.Contract.ParentID,
+			&i.Contract.OverrideCreatorUserID,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
 const getTokenFullDetailsByUserId = `-- name: GetTokenFullDetailsByUserId :many
-select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
+select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id
 from tokens
 join token_definitions on tokens.token_definition_id = token_definitions.id
 join contracts on token_definitions.contract_id = contracts.id
@@ -4468,18 +4505,20 @@ func (q *Queries) GetTokenFullDetailsByUserId(ctx context.Context, ownerUserID p
 		&i.TokenDefinition.ExternalUrl,
 		&i.TokenDefinition.Chain,
 		&i.TokenDefinition.IsProviderMarkedSpam,
+		&i.TokenDefinition.Metadata,
 		&i.TokenDefinition.FallbackMedia,
+		&i.TokenDefinition.ContractAddress,
 		&i.TokenDefinition.ContractID,
 		&i.TokenDefinition.TokenMediaID,
 		&i.TokenMedia.ID,
 		&i.TokenMedia.CreatedAt,
 		&i.TokenMedia.LastUpdated,
 		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
+		&i.TokenMedia.ContractIDDeprecated,
+		&i.TokenMedia.TokenIDDeprecated,
+		&i.TokenMedia.ChainDeprecated,
 		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
+		&i.TokenMedia.MetadataDeprecated,
 		&i.TokenMedia.Media,
 		&i.TokenMedia.NameDeprecated,
 		&i.TokenMedia.DescriptionDeprecated,
@@ -6144,128 +6183,104 @@ func (q *Queries) InsertSpamContracts(ctx context.Context, arg InsertSpamContrac
 }
 
 const insertTokenPipelineResults = `-- name: InsertTokenPipelineResults :exec
-with insert_job(id) as (
-    insert into token_processing_jobs (id, token_properties, pipeline_metadata, processing_cause, processor_version)
-    values ($7, $8, $9, $10, $11)
-    returning id
-),
-insert_media_move_active_record(last_updated) as (
-    insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated)
-    (
-        select $12, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, false, created_at, now()
-        from token_medias
-        where contract_id = $4
-            and token_id = $5
-            and chain = $3
-            and active
-            and not deleted
-            and $13 = true
-        limit 1
-    )
-    returning last_updated
-),
-insert_media_add_record(insert_id, active, replaced_current) as (
-    insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated)
-    values ($14, $4, $5, $3, $15, $16, $1, $2, (select id from insert_job), $13,
-        -- Using timestamps generated from insert_media_move_active_record ensures that the new record is only inserted after the current media is moved
-        (select coalesce((select last_updated from insert_media_move_active_record), now())),
-        (select coalesce((select last_updated from insert_media_move_active_record), now()))
-    )
-    on conflict (contract_id, token_id, chain) where active and not deleted do update
-        set metadata = excluded.metadata,
-            media = excluded.media,
-            name = coalesce(nullif(excluded.name, ''), token_medias.name),
-            description = coalesce(nullif(excluded.description, ''), token_medias.description),
-            processing_job_id = excluded.processing_job_id,
-            last_updated = now()
-    returning id as insert_id, active, id = $14 replaced_current
-),
-existing_active(id) as (
-    select id
-    from token_medias
-    where chain = $3 and contract_id = $4 and token_id = $5 and active and not deleted
-    limit 1
-)
-update tokens
-set token_media_id = (
-    case
-        -- The pipeline didn't produce active media, but one already exists so use that one
-        when not insert_medias.active and (select id from existing_active) is not null
-        then (select id from existing_active)
-
-        -- The pipeline produced active media, or didn't produce active media but no active media existed before
-        else insert_medias.insert_id
-    end
-), name = coalesce(nullif($1, ''), tokens.name), description = coalesce(nullif($2, ''), tokens.description), last_updated = now() -- update the duplicate fields on the token in the meantime before we get rid of these fields
-from insert_media_add_record insert_medias
-where
-    tokens.chain = $3
-    and tokens.contract_id = $4
-    and tokens.token_id = $5
-    and not tokens.deleted
-    and (
-        -- The case statement below handles which token instances get updated:
-        case
-            -- If the active media already existed, update tokens that have no media (new tokens that haven't been processed before) or tokens that don't use this media yet
-            when insert_medias.active and not insert_medias.replaced_current
-            then (tokens.token_media_id is null or tokens.token_media_id != insert_medias.insert_id)
-
-            -- Brand new active media, update all tokens in the filter to use this media
-            when insert_medias.active and insert_medias.replaced_current
-            then 1 = 1
-
-            -- The pipeline run produced inactive media, only update the token instance (since it may have not been processed before)
-            -- Since there is no db constraint on inactive media, all inactive media is new
-            when not insert_medias.active
-            then tokens.id = $6
-
-            else 1 = 1
-        end
-    )
-`
-
-type InsertTokenPipelineResultsParams struct {
-	Name             interface{}              `json:"name"`
-	Description      interface{}              `json:"description"`
-	Chain            persist.Chain            `json:"chain"`
-	ContractID       persist.DBID             `json:"contract_id"`
-	TokenID          persist.TokenID          `json:"token_id"`
-	TokenDbid        string                   `json:"token_dbid"`
-	ProcessingJobID  persist.DBID             `json:"processing_job_id"`
-	TokenProperties  persist.TokenProperties  `json:"token_properties"`
-	PipelineMetadata persist.PipelineMetadata `json:"pipeline_metadata"`
-	ProcessingCause  persist.ProcessingCause  `json:"processing_cause"`
-	ProcessorVersion string                   `json:"processor_version"`
-	RetiredMediaID   persist.DBID             `json:"retired_media_id"`
-	Active           interface{}              `json:"active"`
-	NewMediaID       persist.DBID             `json:"new_media_id"`
-	Metadata         persist.TokenMetadata    `json:"metadata"`
-	Media            persist.Media            `json:"media"`
-}
-
-// Optionally create an inactive record of the existing active record if the new media is also active
-// Update the existing active record with the new media data
-// This will return the existing active record if it exists. If the incoming record is active,
-// this will still return the active record before the update, and not the new record.
-func (q *Queries) InsertTokenPipelineResults(ctx context.Context, arg InsertTokenPipelineResultsParams) error {
-	_, err := q.db.Exec(ctx, insertTokenPipelineResults,
-		arg.Name,
-		arg.Description,
-		arg.Chain,
-		arg.ContractID,
-		arg.TokenID,
-		arg.TokenDbid,
-		arg.ProcessingJobID,
-		arg.TokenProperties,
-		arg.PipelineMetadata,
-		arg.ProcessingCause,
-		arg.ProcessorVersion,
-		arg.RetiredMediaID,
-		arg.Active,
-		arg.NewMediaID,
-		arg.Metadata,
-		arg.Media,
-	)
+select 'implement me'
+`
+
+// with insert_job(id) as (
+//
+//	insert into token_processing_jobs (id, token_properties, pipeline_metadata, processing_cause, processor_version)
+//	values (@processing_job_id, @token_properties, @pipeline_metadata, @processing_cause, @processor_version)
+//	returning id
+//
+// ),
+// -- Optionally create an inactive record of the existing active record if the new media is also active
+// insert_media_move_active_record(last_updated) as (
+//
+//	insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated)
+//	(
+//	    select @retired_media_id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, false, created_at, now()
+//	    from token_medias
+//	    where contract_id = @contract_id
+//	        and token_id = @token_id
+//	        and chain = @chain
+//	        and active
+//	        and not deleted
+//	        and @active = true
+//	    limit 1
+//	)
+//	returning last_updated
+//
+// ),
+// -- Update the existing active record with the new media data
+// insert_media_add_record(insert_id, active, replaced_current) as (
+//
+//	insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated)
+//	values (@new_media_id, @contract_id, @token_id, @chain, @metadata, @media, @name, @description, (select id from insert_job), @active,
+//	    -- Using timestamps generated from insert_media_move_active_record ensures that the new record is only inserted after the current media is moved
+//	    (select coalesce((select last_updated from insert_media_move_active_record), now())),
+//	    (select coalesce((select last_updated from insert_media_move_active_record), now()))
+//	)
+//	on conflict (contract_id, token_id, chain) where active and not deleted do update
+//	    set metadata = excluded.metadata,
+//	        media = excluded.media,
+//	        name = coalesce(nullif(excluded.name, ''), token_medias.name),
+//	        description = coalesce(nullif(excluded.description, ''), token_medias.description),
+//	        processing_job_id = excluded.processing_job_id,
+//	        last_updated = now()
+//	returning id as insert_id, active, id = @new_media_id replaced_current
+//
+// ),
+// -- This will return the existing active record if it exists. If the incoming record is active,
+// -- this will still return the active record before the update, and not the new record.
+// existing_active(id) as (
+//
+//	select id
+//	from token_medias
+//	where chain = @chain and contract_id = @contract_id and token_id = @token_id and active and not deleted
+//	limit 1
+//
+// )
+// update tokens
+// set token_media_id = (
+//
+//	case
+//	    -- The pipeline didn't produce active media, but one already exists so use that one
+//	    when not insert_medias.active and (select id from existing_active) is not null
+//	    then (select id from existing_active)
+//
+//	    -- The pipeline produced active media, or didn't produce active media but no active media existed before
+//	    else insert_medias.insert_id
+//	end
+//
+// ), name = coalesce(nullif(@name, ''), tokens.name), description = coalesce(nullif(@description, ''), tokens.description), last_updated = now() -- update the duplicate fields on the token in the meantime before we get rid of these fields
+// from insert_media_add_record insert_medias
+// where
+//
+//	tokens.chain = @chain
+//	and tokens.contract_id = @contract_id
+//	and tokens.token_id = @token_id
+//	and not tokens.deleted
+//	and (
+//	    -- The case statement below handles which token instances get updated:
+//	    case
+//	        -- If the active media already existed, update tokens that have no media (new tokens that haven't been processed before) or tokens that don't use this media yet
+//	        when insert_medias.active and not insert_medias.replaced_current
+//	        then (tokens.token_media_id is null or tokens.token_media_id != insert_medias.insert_id)
+//
+//	        -- Brand new active media, update all tokens in the filter to use this media
+//	        when insert_medias.active and insert_medias.replaced_current
+//	        then 1 = 1
+//
+//	        -- The pipeline run produced inactive media, only update the token instance (since it may have not been processed before)
+//	        -- Since there is no db constraint on inactive media, all inactive media is new
+//	        when not insert_medias.active
+//	        then tokens.id = @token_dbid
+//
+//	        else 1 = 1
+//	    end
+//	);
+func (q *Queries) InsertTokenPipelineResults(ctx context.Context) error {
+	_, err := q.db.Exec(ctx, insertTokenPipelineResults)
 	return err
 }
diff --git a/db/gen/coredb/token_upsert.sql.go b/db/gen/coredb/token_upsert.sql.go
index f46616b40..bde27ad1c 100644
--- a/db/gen/coredb/token_upsert.sql.go
+++ b/db/gen/coredb/token_upsert.sql.go
@@ -71,6 +71,7 @@ with token_definitions_insert as (
     , is_provider_marked_spam
     , fallback_media
     , contract_id
+    , metadata
 ) (
     select unnest($1::varchar[]) as id
     , now()
@@ -85,6 +86,7 @@ with token_definitions_insert as (
     , unnest($8::bool[]) as is_provider_marked_spam
     , unnest($9::jsonb[]) as fallback_media
     , unnest($10::varchar[]) as contract_id
+    , unnest($11::jsonb[]) as metadata
 )
 on conflict (chain, contract_id, token_id) where deleted = false do update set
@@ -93,8 +95,10 @@
     , description = coalesce(nullif(excluded.description, ''), nullif(description, ''))
     , external_url = coalesce(nullif(excluded.external_url, ''), nullif(external_url, ''))
     , is_provider_marked_spam = excluded.is_provider_marked_spam
+    -- Maybe smarter update logic for fallback media and metadata?
     , fallback_media = excluded.fallback_media
-    returning id, created_at, last_updated, deleted, name, description, token_type, token_id, external_url, chain, is_provider_marked_spam, fallback_media, contract_id, token_media_id
+    , metadata = excluded.metadata
+    returning id, created_at, last_updated, deleted, name, description, token_type, token_id, external_url, chain, is_provider_marked_spam, metadata, fallback_media, contract_address, contract_id, token_media_id
 )
 , tokens_insert as (
     insert into tokens
@@ -125,47 +129,47 @@ with token_definitions_insert as (
     , collectors_note
     , token_id
     , quantity
-    , case when $11::bool then ownership_history[ownership_history_start_idx::int:ownership_history_end_idx::int] else '{}' end
+    , case when $12::bool then ownership_history[ownership_history_start_idx::int:ownership_history_end_idx::int] else '{}' end
     , block_number
     , owner_user_id
-    , case when $11 then owned_by_wallets[owned_by_wallets_start_idx::int:owned_by_wallets_end_idx::int] else '{}' end
-    , case when $12::bool then is_creator_token else false end
+    , case when $12 then owned_by_wallets[owned_by_wallets_start_idx::int:owned_by_wallets_end_idx::int] else '{}' end
+    , case when $13::bool then is_creator_token else false end
     , chain
     , contract_id
     , now()
     from (
-        select unnest($13::varchar[]) as id
-        , unnest($14::int[]) as version
-        , unnest($15::varchar[]) as collectors_note
-        , unnest($16::varchar[]) as quantity
-        , $17::jsonb[] as ownership_history
-        , unnest($18::int[]) as ownership_history_start_idx
-        , unnest($19::int[]) as ownership_history_end_idx
-        , unnest($20::bigint[]) as block_number
-        , unnest($21::varchar[]) as owner_user_id
-        , $22::varchar[] as owned_by_wallets
-        , unnest($23::int[]) as owned_by_wallets_start_idx
-        , unnest($24::int[]) as owned_by_wallets_end_idx
-        , unnest($25::bool[]) as is_creator_token
-        , unnest($26::varchar[]) as token_id
-        , unnest($27::varchar[]) as contract_id
-        , unnest($28::int[]) as chain
+        select unnest($14::varchar[]) as id
+        , unnest($15::int[]) as version
+        , unnest($16::varchar[]) as collectors_note
+        , unnest($17::varchar[]) as quantity
+        , $18::jsonb[] as ownership_history
+        , unnest($19::int[]) as ownership_history_start_idx
+        , unnest($20::int[]) as ownership_history_end_idx
+        , unnest($21::bigint[]) as block_number
+        , unnest($22::varchar[]) as owner_user_id
+        , $23::varchar[] as owned_by_wallets
+        , unnest($24::int[]) as owned_by_wallets_start_idx
+        , unnest($25::int[]) as owned_by_wallets_end_idx
+        , unnest($26::bool[]) as is_creator_token
+        , unnest($27::varchar[]) as token_id
+        , unnest($28::varchar[]) as contract_id
+        , unnest($29::int[]) as chain
     ) bulk_upsert
     join token_definitions on (bulk_upsert.chain, bulk_upsert.contract_id, bulk_upsert.token_id) = (token_definitions.chain, token_definitions.contract_id, token_definitions.token_id)
 )
 on conflict (owner_user_id, token_definition_id) where deleted = false do update set quantity = excluded.quantity
-    , owned_by_wallets = case when $11 then excluded.owned_by_wallets else tokens.owned_by_wallets end
-    , ownership_history = case when $11 then tokens.ownership_history || excluded.ownership_history else tokens.ownership_history end
-    , is_creator_token = case when $12 then excluded.is_creator_token else tokens.is_creator_token end
+    , owned_by_wallets = case when $12 then excluded.owned_by_wallets else tokens.owned_by_wallets end
+    , ownership_history = case when $12 then tokens.ownership_history || excluded.ownership_history else tokens.ownership_history end
+    , is_creator_token = case when $13 then excluded.is_creator_token else tokens.is_creator_token end
     , block_number = excluded.block_number
     , version = excluded.version
     , last_updated = excluded.last_updated
    , last_synced = greatest(excluded.last_synced,tokens.last_synced)
     returning id, deleted, version, created_at, last_updated, name__deprecated, description__deprecated, collectors_note, token_uri__deprecated, token_type__deprecated, token_id, quantity, ownership_history__deprecated, external_url__deprecated, block_number, owner_user_id, owned_by_wallets, chain, contract_id, is_user_marked_spam, is_provider_marked_spam__deprecated, last_synced, fallback_media__deprecated, token_media_id__deprecated, is_creator_token, is_holder_token, displayable, token_definition_id
 )
-select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.fallback_media, token_definitions.contract_id, token_definitions.token_media_id, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id, token_medias.token_id, token_medias.chain, token_medias.active, token_medias.metadata, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted
+select tokens.id, tokens.deleted, tokens.version, tokens.created_at, tokens.last_updated, tokens.name__deprecated, tokens.description__deprecated, tokens.collectors_note, tokens.token_uri__deprecated, tokens.token_type__deprecated, tokens.token_id, tokens.quantity, tokens.ownership_history__deprecated, tokens.external_url__deprecated, tokens.block_number, tokens.owner_user_id, tokens.owned_by_wallets, tokens.chain, tokens.contract_id, tokens.is_user_marked_spam, tokens.is_provider_marked_spam__deprecated, tokens.last_synced, tokens.fallback_media__deprecated, tokens.token_media_id__deprecated, tokens.is_creator_token, tokens.is_holder_token, tokens.displayable, tokens.token_definition_id, token_definitions.id, token_definitions.created_at, token_definitions.last_updated, token_definitions.deleted, token_definitions.name, token_definitions.description, token_definitions.token_type, token_definitions.token_id, token_definitions.external_url, token_definitions.chain, token_definitions.is_provider_marked_spam, token_definitions.metadata, token_definitions.fallback_media, token_definitions.contract_address, token_definitions.contract_id, token_definitions.token_media_id, contracts.id, contracts.deleted, contracts.version, contracts.created_at, contracts.last_updated, contracts.name, contracts.symbol, contracts.address, contracts.creator_address, contracts.chain, contracts.profile_banner_url, contracts.profile_image_url, contracts.badge_url, contracts.description, contracts.owner_address, contracts.is_provider_marked_spam, contracts.parent_id, contracts.override_creator_user_id, token_medias.id, token_medias.created_at, token_medias.last_updated, token_medias.version, token_medias.contract_id__deprecated, token_medias.token_id__deprecated, token_medias.chain__deprecated, token_medias.active, token_medias.metadata__deprecated, token_medias.media, token_medias.name__deprecated, token_medias.description__deprecated, token_medias.processing_job_id, token_medias.deleted
 from tokens_insert tokens
 join token_definitions_insert token_definitions on tokens.token_definition_id = token_definitions.id
 join contracts on token_definitions.contract_id = contracts.id
@@ -183,6 +187,7 @@ type UpsertTokensParams struct {
 	DefinitionIsProviderMarkedSpam []bool         `json:"definition_is_provider_marked_spam"`
 	DefinitionFallbackMedia        []pgtype.JSONB `json:"definition_fallback_media"`
 	DefinitionContractID           []string       `json:"definition_contract_id"`
+	DefinitionMetadata             []pgtype.JSONB `json:"definition_metadata"`
 	SetHolderFields                bool           `json:"set_holder_fields"`
 	SetCreatorFields               bool           `json:"set_creator_fields"`
 	TokenDbid                      []string       `json:"token_dbid"`
@@ -222,6 +227,7 @@ func (q *Queries) UpsertTokens(ctx context.Context, arg UpsertTokensParams) ([]U
 		arg.DefinitionIsProviderMarkedSpam,
 		arg.DefinitionFallbackMedia,
 		arg.DefinitionContractID,
+		arg.DefinitionMetadata,
 		arg.SetHolderFields,
 		arg.SetCreatorFields,
 		arg.TokenDbid,
@@ -288,7 +294,9 @@ func (q *Queries) UpsertTokens(ctx context.Context, arg UpsertTokensParams) ([]U
 		&i.TokenDefinition.ExternalUrl,
 		&i.TokenDefinition.Chain,
 		&i.TokenDefinition.IsProviderMarkedSpam,
+		&i.TokenDefinition.Metadata,
 		&i.TokenDefinition.FallbackMedia,
+		&i.TokenDefinition.ContractAddress,
 		&i.TokenDefinition.ContractID,
 		&i.TokenDefinition.TokenMediaID,
 		&i.Contract.ID,
@@ -313,11 +321,11 @@ func (q *Queries) UpsertTokens(ctx context.Context, arg UpsertTokensParams) ([]U
 		&i.TokenMedia.CreatedAt,
 		&i.TokenMedia.LastUpdated,
 		&i.TokenMedia.Version,
-		&i.TokenMedia.ContractID,
-		&i.TokenMedia.TokenID,
-		&i.TokenMedia.Chain,
+		&i.TokenMedia.ContractIDDeprecated,
+		&i.TokenMedia.TokenIDDeprecated,
+		&i.TokenMedia.ChainDeprecated,
 		&i.TokenMedia.Active,
-		&i.TokenMedia.Metadata,
+		&i.TokenMedia.MetadataDeprecated,
 		&i.TokenMedia.Media,
 		&i.TokenMedia.NameDeprecated,
 		&i.TokenMedia.DescriptionDeprecated,
diff --git a/db/migrations/core/000111_add_token_definitions.up.sql b/db/migrations/core/000111_add_token_definitions.up.sql
index b18e4de25..9578771fc 100644
--- a/db/migrations/core/000111_add_token_definitions.up.sql
+++ b/db/migrations/core/000111_add_token_definitions.up.sql
@@ -10,9 +10,12 @@ create table if not exists token_definitions (
     external_url character varying,
     chain integer,
     is_provider_marked_spam boolean not null default false,
+    metadata jsonb,
     fallback_media jsonb,
-    contract_id character varying(255) not null references contracts(id),
-    token_media_id character
varying(255) references token_media(id) + contract_address character varying(255) not null, + contract_id character varying(255), + token_media_id character varying(255) references token_media(id), + foreign key(contract_id, chain, contract_address) references contracts(id, chain, contract_address) ); create unique index if not exists token_definitions_chain_contract_token_idx on token_definitions(chain, contract_id, token_id) where not deleted; create index token_definitions_chain_contract_token_idx on token_definitions(chain, contract_id, token_id) where not deleted; @@ -32,7 +35,8 @@ alter table tokens rename column token_media_id to token_media_id__deprecated; alter table token_medias rename column name to name__deprecated; alter table token_medias rename column description to description__deprecated; +alter table token_medias rename column metadata to metadata__deprecated; -- XXX: Remove me --- alter table token_medias rename column contract_id to contract_id__deprecated; --- alter table token_medias rename column token_id to token_id__deprecated; --- alter table token_medias rename column chain to chain__deprecated; +alter table token_medias rename column contract_id to contract_id__deprecated; +alter table token_medias rename column token_id to token_id__deprecated; +alter table token_medias rename column chain to chain__deprecated; diff --git a/db/queries/core/query.sql b/db/queries/core/query.sql index 7fc1355b7..0075bb6c8 100644 --- a/db/queries/core/query.sql +++ b/db/queries/core/query.sql @@ -91,12 +91,14 @@ SELECT c.* FROM galleries g, unnest(g.collections) -- name: GetTokenById :one select * from tokens where id = $1 and displayable and deleted = false; +-- name: GetTokenDefinitionById :one +select * from token_definitions where id = $1 and not deleted; + -- name: GetTokenDefinitionByTokenDbid :one select token_definitions.* from token_definitions, tokens where token_definitions.id = tokens.token_definition_id and tokens.id = $1 - and tokens.displayable and not tokens.deleted and not token_definitions.deleted; @@ -126,16 +128,6 @@ where token_definitions.chain = @chain and not token_definitions.deleted and not token_medias.deleted; --- name: GetTokenDefinitionAndContractByTokenIdentifiers :one -select sqlc.embed(token_definitions), sqlc.embed(contracts) -from token_definitions, contracts -where contracts.chain = @chain - and contracts.address = @contract_address - and token_definitions.token_id = @token_id - and not token_definitions.deleted - and not token_medias.deleted - and not contracts.deleted; - -- name: GetTokenFullDetailsByTokenDbid :one select sqlc.embed(tokens), sqlc.embed(token_definitions), sqlc.embed(token_medias), sqlc.embed(contracts) from tokens @@ -162,6 +154,15 @@ left join token_medias on token_definitions.token_media_id = token_medias.id where contracts.id = $1 and tokens.displayable and not tokens.deleted and not token_definitions.deleted and not contracts.deleted order by tokens.block_number desc; +-- name: GetTokenFullDetailsByTokenDefinitionId :many +select sqlc.embed(tokens), sqlc.embed(token_definitions), sqlc.embed(token_medias), sqlc.embed(contracts) +from tokens +join token_definitions on tokens.token_definition_id = token_definitions.id +join contracts on token_definitions.contract_id = contracts.id +left join token_medias on token_definitions.token_media_id = token_medias.id +where token_definitions.id = $1 and tokens.displayable and not tokens.deleted and not token_definitions.deleted and not contracts.deleted +order by tokens.block_number 
desc; + -- name: UpdateTokenCollectorsNoteByTokenDbidUserId :exec update tokens set collectors_note = $1, last_updated = now() where id = $2 and owner_user_id = $3; @@ -272,9 +273,6 @@ select * FROM contracts WHERE address = $1 AND chain = $2 AND deleted = false; -- name: GetContractByChainAddressBatch :batchone select * FROM contracts WHERE address = $1 AND chain = $2 AND deleted = false; --- name: GetContractByTokenDefinitionId :one -select * from contracts where contracts.id = (select contract_id from token_definitions where token_definitions.id = $1 and not token_definitions.deleted) and not contracts.deleted = false; - -- name: GetContractsDisplayedByUserIDBatch :batchmany with last_refreshed as ( select last_updated from owned_contracts limit 1 @@ -1302,88 +1300,89 @@ update wallets set deleted = true, last_updated = now() where id = $1; insert into users (id, username, username_idempotent, bio, universal, email_unsubscriptions) values ($1, $2, $3, $4, $5, $6) returning id; -- name: InsertTokenPipelineResults :exec -with insert_job(id) as ( - insert into token_processing_jobs (id, token_properties, pipeline_metadata, processing_cause, processor_version) - values (@processing_job_id, @token_properties, @pipeline_metadata, @processing_cause, @processor_version) - returning id -), --- Optionally create an inactive record of the existing active record if the new media is also active -insert_media_move_active_record(last_updated) as ( - insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated) - ( - select @retired_media_id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, false, created_at, now() - from token_medias - where contract_id = @contract_id - and token_id = @token_id - and chain = @chain - and active - and not deleted - and @active = true - limit 1 - ) - returning last_updated -), --- Update the existing active record with the new media data -insert_media_add_record(insert_id, active, replaced_current) as ( - insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated) - values (@new_media_id, @contract_id, @token_id, @chain, @metadata, @media, @name, @description, (select id from insert_job), @active, - -- Using timestamps generated from insert_media_move_active_record ensures that the new record is only inserted after the current media is moved - (select coalesce((select last_updated from insert_media_move_active_record), now())), - (select coalesce((select last_updated from insert_media_move_active_record), now())) - ) - on conflict (contract_id, token_id, chain) where active and not deleted do update - set metadata = excluded.metadata, - media = excluded.media, - name = coalesce(nullif(excluded.name, ''), token_medias.name), - description = coalesce(nullif(excluded.description, ''), token_medias.description), - processing_job_id = excluded.processing_job_id, - last_updated = now() - returning id as insert_id, active, id = @new_media_id replaced_current -), --- This will return the existing active record if it exists. If the incoming record is active, --- this will still return the active record before the update, and not the new record. 
-existing_active(id) as ( - select id - from token_medias - where chain = @chain and contract_id = @contract_id and token_id = @token_id and active and not deleted - limit 1 -) -update tokens -set token_media_id = ( - case - -- The pipeline didn't produce active media, but one already exists so use that one - when not insert_medias.active and (select id from existing_active) is not null - then (select id from existing_active) - - -- The pipeline produced active media, or didn't produce active media but no active media existed before - else insert_medias.insert_id - end -), name = coalesce(nullif(@name, ''), tokens.name), description = coalesce(nullif(@description, ''), tokens.description), last_updated = now() -- update the duplicate fields on the token in the meantime before we get rid of these fields -from insert_media_add_record insert_medias -where - tokens.chain = @chain - and tokens.contract_id = @contract_id - and tokens.token_id = @token_id - and not tokens.deleted - and ( - -- The case statement below handles which token instances get updated: - case - -- If the active media already existed, update tokens that have no media (new tokens that haven't been processed before) or tokens that don't use this media yet - when insert_medias.active and not insert_medias.replaced_current - then (tokens.token_media_id is null or tokens.token_media_id != insert_medias.insert_id) - - -- Brand new active media, update all tokens in the filter to use this media - when insert_medias.active and insert_medias.replaced_current - then 1 = 1 - - -- The pipeline run produced inactive media, only update the token instance (since it may have not been processed before) - -- Since there is no db constraint on inactive media, all inactive media is new - when not insert_medias.active - then tokens.id = @token_dbid - - else 1 = 1 - end - ); +-- with insert_job(id) as ( +-- insert into token_processing_jobs (id, token_properties, pipeline_metadata, processing_cause, processor_version) +-- values (@processing_job_id, @token_properties, @pipeline_metadata, @processing_cause, @processor_version) +-- returning id +-- ), +-- -- Optionally create an inactive record of the existing active record if the new media is also active +-- insert_media_move_active_record(last_updated) as ( +-- insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated) +-- ( +-- select @retired_media_id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, false, created_at, now() +-- from token_medias +-- where contract_id = @contract_id +-- and token_id = @token_id +-- and chain = @chain +-- and active +-- and not deleted +-- and @active = true +-- limit 1 +-- ) +-- returning last_updated +-- ), +-- -- Update the existing active record with the new media data +-- insert_media_add_record(insert_id, active, replaced_current) as ( +-- insert into token_medias (id, contract_id, token_id, chain, metadata, media, name, description, processing_job_id, active, created_at, last_updated) +-- values (@new_media_id, @contract_id, @token_id, @chain, @metadata, @media, @name, @description, (select id from insert_job), @active, +-- -- Using timestamps generated from insert_media_move_active_record ensures that the new record is only inserted after the current media is moved +-- (select coalesce((select last_updated from insert_media_move_active_record), now())), +-- (select coalesce((select last_updated from 
insert_media_move_active_record), now())) +-- ) +-- on conflict (contract_id, token_id, chain) where active and not deleted do update +-- set metadata = excluded.metadata, +-- media = excluded.media, +-- name = coalesce(nullif(excluded.name, ''), token_medias.name), +-- description = coalesce(nullif(excluded.description, ''), token_medias.description), +-- processing_job_id = excluded.processing_job_id, +-- last_updated = now() +-- returning id as insert_id, active, id = @new_media_id replaced_current +-- ), +-- -- This will return the existing active record if it exists. If the incoming record is active, +-- -- this will still return the active record before the update, and not the new record. +-- existing_active(id) as ( +-- select id +-- from token_medias +-- where chain = @chain and contract_id = @contract_id and token_id = @token_id and active and not deleted +-- limit 1 +-- ) +-- update tokens +-- set token_media_id = ( +-- case +-- -- The pipeline didn't produce active media, but one already exists so use that one +-- when not insert_medias.active and (select id from existing_active) is not null +-- then (select id from existing_active) +-- +-- -- The pipeline produced active media, or didn't produce active media but no active media existed before +-- else insert_medias.insert_id +-- end +-- ), name = coalesce(nullif(@name, ''), tokens.name), description = coalesce(nullif(@description, ''), tokens.description), last_updated = now() -- update the duplicate fields on the token in the meantime before we get rid of these fields +-- from insert_media_add_record insert_medias +-- where +-- tokens.chain = @chain +-- and tokens.contract_id = @contract_id +-- and tokens.token_id = @token_id +-- and not tokens.deleted +-- and ( +-- -- The case statement below handles which token instances get updated: +-- case +-- -- If the active media already existed, update tokens that have no media (new tokens that haven't been processed before) or tokens that don't use this media yet +-- when insert_medias.active and not insert_medias.replaced_current +-- then (tokens.token_media_id is null or tokens.token_media_id != insert_medias.insert_id) +-- +-- -- Brand new active media, update all tokens in the filter to use this media +-- when insert_medias.active and insert_medias.replaced_current +-- then 1 = 1 +-- +-- -- The pipeline run produced inactive media, only update the token instance (since it may have not been processed before) +-- -- Since there is no db constraint on inactive media, all inactive media is new +-- when not insert_medias.active +-- then tokens.id = @token_dbid +-- +-- else 1 = 1 +-- end +-- ); +select 'implement me'; -- name: InsertSpamContracts :exec with insert_spam_contracts as ( @@ -1497,24 +1496,12 @@ from token_medias m where m.id = (select token_media_id from tokens where tokens.id = $1) and not m.deleted; -- name: GetMediaByUserTokenIdentifiers :one -with contract as ( - select * from contracts where contracts.chain = @chain and contracts.address = @address and not contracts.deleted -), -matching_media as ( - select token_medias.* - from token_medias, contract - where token_medias.contract_id = contract.id and token_medias.chain = @chain and token_medias.token_id = @token_id and not token_medias.deleted - order by token_medias.active desc, token_medias.last_updated desc - limit 1 -), -matched_token(id) as ( - select tokens.id - from tokens, contract, matching_media - where tokens.contract = contract.id and tokens.chain = @chain and tokens.token_id = @token_id and not 
tokens.deleted - order by tokens.owner_user_id = @user_id desc, tokens.token_media_id = matching_media.id desc, tokens.last_updated desc - limit 1 -) -select sqlc.embed(token_medias), (select id from matched_token) token_instance_id from matching_media token_medias; +select token_medias.* +from token_medias +join token_definitions on token_definitions.token_media_id = token_medias.id +where (token_definitions.chain, token_definitions.contract_address, token_definitions.token_id) = (@chain, @contract_address, @token_id) + and not token_definitions.deleted +order by token_medias.active desc, token_medias.last_updated desc; -- name: GetFallbackTokenByUserTokenIdentifiers :one with contract as ( diff --git a/db/queries/core/token_upsert.sql b/db/queries/core/token_upsert.sql index 6c9e51681..1491397bc 100644 --- a/db/queries/core/token_upsert.sql +++ b/db/queries/core/token_upsert.sql @@ -15,6 +15,7 @@ with token_definitions_insert as ( , is_provider_marked_spam , fallback_media , contract_id + , metadata ) ( select unnest(@definition_dbid::varchar[]) as id , now() @@ -29,6 +30,7 @@ with token_definitions_insert as ( , unnest(@definition_is_provider_marked_spam::bool[]) as is_provider_marked_spam , unnest(@definition_fallback_media::jsonb[]) as fallback_media , unnest(@definition_contract_id::varchar[]) as contract_id + , unnest(@definition_metadata::jsonb[]) as metadata ) on conflict (chain, contract_id, token_id) where deleted = false do update set @@ -37,7 +39,9 @@ with token_definitions_insert as ( , description = coalesce(nullif(excluded.description, ''), nullif(description, '')) , external_url = coalesce(nullif(excluded.external_url, ''), nullif(external_url, '')) , is_provider_marked_spam = excluded.is_provider_marked_spam + -- Maybe smarter update logic for fallback media and metadata? , fallback_media = excluded.fallback_media + , metadata = excluded.metadata returning * ) , tokens_insert as ( diff --git a/service/multichain/multichain.go b/service/multichain/multichain.go index a4a439ef1..241e3c8f5 100644 --- a/service/multichain/multichain.go +++ b/service/multichain/multichain.go @@ -863,7 +863,7 @@ func (c combinedProviderChildContractResults) ParentContracts() []db.Contract { contracts: contracts, }) } - return contractsToUpsertableContracts(combined, nil) + return chainContractsToUpsertableContracts(combined, nil) } // SyncTokensCreatedOnSharedContracts queries each provider to identify contracts created by the given user. @@ -953,8 +953,8 @@ func (p *Provider) processTokensForUsers(ctx context.Context, users map[persist. upsertableTokens := make([]db.Token, 0) for userID, user := range users { - tokens := tokensToUpsertableTokens(chainTokensForUsers[userID], contracts, user) - definitions := tokensToUpsertableTokenDefinitions(chainTokensForUsers[userID], contracts) + tokens := chainTokensToUpsertableTokens(chainTokensForUsers[userID], contracts, user) + definitions := chainTokensToUpsertableTokenDefinitions(chainTokensForUsers[userID], contracts) upsertableTokens = append(upsertableTokens, tokens...) upsertableDefinitions = append(upsertableDefinitions, definitions...) } @@ -978,20 +978,20 @@ func (p *Provider) processTokensForUsers(ctx context.Context, users map[persist. 
// Create a lookup for userID to persisted token IDs currentUserTokens = make(map[persist.DBID][]postgres.TokenFullDetails) for _, token := range upsertedTokens { - currentUserTokens[token.Token.OwnerUserID] = append(currentUserTokens[token.Token.OwnerUserID], token) + currentUserTokens[token.Instance.OwnerUserID] = append(currentUserTokens[token.Instance.OwnerUserID], token) } newUserTokens = make(map[persist.DBID][]postgres.TokenFullDetails, len(users)) for userID := range users { currentTokensForUser := currentUserTokens[userID] - newPersistedTokens := util.Filter(currentTokensForUser, func(t postgres.TokenFullDetails) bool { return t.Token.CreatedAt.Equal(t.Token.LastUpdated) }, false) + newPersistedTokens := util.Filter(currentTokensForUser, func(t postgres.TokenFullDetails) bool { return t.Instance.CreatedAt.Equal(t.Instance.LastUpdated) }, false) newUserTokens[userID] = newPersistedTokens } for userID := range users { // include the existing tokens that were not persisted with the bulk upsert - currentUserTokens[userID] = util.DedupeWithTranslate(append(currentUserTokens[userID], existingTokensForUsers[userID]...), false, func(t postgres.TokenFullDetails) persist.DBID { return t.Token.ID }) + currentUserTokens[userID] = util.DedupeWithTranslate(append(currentUserTokens[userID], existingTokensForUsers[userID]...), false, func(t postgres.TokenFullDetails) persist.DBID { return t.Instance.ID }) } // Submit tokens that are missing media IDs. Tokens that are missing media IDs are new tokens, or tokens that weren't processed for whatever reason. @@ -1075,7 +1075,7 @@ func (p *Provider) processTokensForOwnersOfContract(ctx context.Context, contrac existingTokensForUsers := make(map[persist.DBID][]postgres.TokenFullDetails) for _, t := range existingTokens { - existingTokensForUsers[t.Token.OwnerUserID] = append(existingTokensForUsers[t.Token.OwnerUserID], t) + existingTokensForUsers[t.Instance.OwnerUserID] = append(existingTokensForUsers[t.Instance.OwnerUserID], t) } return p.processTokensForUsers(ctx, users, chainTokensForUsers, existingTokensForUsers, []db.Contract{contract}, upsertParams) @@ -1832,7 +1832,7 @@ func (p *Provider) matchingWalletsChain(wallets []persist.Wallet, chain persist. // the owner address of an existing contract will be overwritten if the new contract provides a non-empty owner address. // An empty owner address will never overwrite an existing address, even if canOverwriteOwnerAddress is true. func (d *Provider) processContracts(ctx context.Context, contractsFromProviders []chainContracts, existingContracts []db.Contract, canOverwriteOwnerAddress bool) (currentContractState []db.Contract, newContracts []db.Contract, err error) { - contractsToUpsert := contractsToUpsertableContracts(contractsFromProviders, existingContracts) + contractsToUpsert := chainContractsToUpsertableContracts(contractsFromProviders, existingContracts) newUpsertedContracts, err := d.Repos.ContractRepository.BulkUpsert(ctx, contractsToUpsert, canOverwriteOwnerAddress) if err != nil { return nil, nil, err @@ -1841,8 +1841,8 @@ func (d *Provider) processContracts(ctx context.Context, contractsFromProviders return util.DedupeWithTranslate(append(newUpsertedContracts, existingContracts...), false, func(c db.Contract) persist.DBID { return c.ID }), newUpsertedContracts, nil } -// tokensToUpsertableTokenDefinitions returns a slice of token definitions that are ready to be upserted into the database from a slice of chainTokens. 
-func tokensToUpsertableTokenDefinitions(chainTokens []chainTokens, existingContracts []db.Contract) []db.TokenDefinition { +// chainTokensToUpsertableTokenDefinitions returns a slice of token definitions that are ready to be upserted into the database from a slice of chainTokens. +func chainTokensToUpsertableTokenDefinitions(chainTokens []chainTokens, existingContracts []db.Contract) []db.TokenDefinition { definitions := make(map[persist.TokenIdentifiers]db.TokenDefinition) // Create a lookup of contracts to their IDs @@ -1870,7 +1870,9 @@ func tokensToUpsertableTokenDefinitions(chainTokens []chainTokens, existingContr ExternalUrl: util.ToNullString(token.ExternalURL, true), Chain: chainToken.chain, IsProviderMarkedSpam: util.GetOptionalValue(token.IsSpam, false), + Metadata: token.TokenMetadata, FallbackMedia: token.FallbackMedia, + ContractAddress: token.ContractAddress, ContractID: contractID, TokenMediaID: "", // Upsert will set this if the definition already exists } @@ -1880,11 +1882,13 @@ func tokensToUpsertableTokenDefinitions(chainTokens []chainTokens, existingContr description := util.FirstNonEmptyString(definition.Description.String, token.Descriptors.Description) externalURL := util.FirstNonEmptyString(definition.ExternalUrl.String, token.ExternalURL) fallbackMedia, _ := util.FindFirst([]persist.FallbackMedia{definition.FallbackMedia, token.FallbackMedia}, func(m persist.FallbackMedia) bool { return m.IsServable() }) + metadata, _ := util.FindFirst([]persist.TokenMetadata{definition.Metadata, token.TokenMetadata}, func(m persist.TokenMetadata) bool { return len(m) > 0 }) + definition.Name = util.ToNullString(name, true) definition.Description = util.ToNullString(description, true) definition.ExternalUrl = util.ToNullString(externalURL, true) definition.FallbackMedia = fallbackMedia - // If any provider reports the token as spam, mark it as spam + definition.Metadata = metadata definition.IsProviderMarkedSpam = definition.IsProviderMarkedSpam || util.GetOptionalValue(token.IsSpam, false) definitions[tokenIdentifiers] = definition } @@ -1899,8 +1903,8 @@ func tokensToUpsertableTokenDefinitions(chainTokens []chainTokens, existingContr return tokenDefinitions } -// tokensToUpsertableTokens returns a unique slice of tokens that are ready to be upserted into the database. -func tokensToUpsertableTokens(tokens []chainTokens, existingContracts []db.Contract, ownerUser persist.User) []db.Token { +// chainTokensToUpsertableTokens returns a unique slice of tokens that are ready to be upserted into the database. +func chainTokensToUpsertableTokens(tokens []chainTokens, existingContracts []db.Contract, ownerUser persist.User) []db.Token { addressToContract := make(map[string]db.Contract) util.Map(existingContracts, func(c db.Contract) (any, error) { @@ -2021,7 +2025,7 @@ type contractMetadata struct { } -// contractsToUpsertableContracts returns a unique slice of contracts that are ready to be upserted into the database. +// chainContractsToUpsertableContracts returns a unique slice of contracts that are ready to be upserted into the database. 
-func contractsToUpsertableContracts(contracts []chainContracts, existingContracts []db.Contract) []db.Contract { +func chainContractsToUpsertableContracts(contracts []chainContracts, existingContracts []db.Contract) []db.Contract { contractMetadatas := map[persist.ChainAddress]contractMetadata{} existingMetadatas := map[persist.ChainAddress]contractMetadata{} diff --git a/service/persist/postgres/postgres.go b/service/persist/postgres/postgres.go index 4d13342ae..823569e95 100644 --- a/service/persist/postgres/postgres.go +++ b/service/persist/postgres/postgres.go @@ -323,7 +323,7 @@ func NewRepositories(pq *sql.DB, pgx *pgxpool.Pool) *Repositories { pool: pgx, UserRepository: NewUserRepository(pq, queries, pgx), NonceRepository: NewNonceRepository(pq, queries), - TokenRepository: NewTokenFullDetailsRepository(pq, queries), + TokenRepository: NewTokenFullDetailsRepository(queries), CollectionRepository: NewCollectionTokenRepository(pq, queries), GalleryRepository: NewGalleryRepository(queries), ContractRepository: NewContractGalleryRepository(pq, queries), diff --git a/service/persist/postgres/token_full_details.go b/service/persist/postgres/token_full_details.go index 5ef3586ce..f30130ec6 100644 --- a/service/persist/postgres/token_full_details.go +++ b/service/persist/postgres/token_full_details.go @@ -5,8 +5,6 @@ import ( "database/sql" "time" - "github.com/lib/pq" - db "github.com/mikeydub/go-gallery/db/gen/coredb" "github.com/mikeydub/go-gallery/service/logger" "github.com/mikeydub/go-gallery/service/persist" @@ -14,59 +12,18 @@ import ( ) type TokenFullDetails struct { - Token db.Token + Instance db.Token Contract db.Contract Definition db.TokenDefinition Media db.TokenMedia } -func (t TokenFullDetails) TokenIdentifiers() persist.TokenIdentifiers { - return persist.NewTokenIdentifiers(t.Contract.Address, t.Token.TokenID, t.Token.Chain) -} - type TokenFullDetailsRepository struct { - db *sql.DB - queries *db.Queries - getByTokenIdentifiersStmt *sql.Stmt - getByTokenIdentifiersPaginateStmt *sql.Stmt - getContractByAddressStmt *sql.Stmt + queries *db.Queries } -func NewTokenFullDetailsRepository(db *sql.DB, queries *db.Queries) *TokenFullDetailsRepository { - ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) - defer cancel() - getByTokenIdentifiersStmt, err := db.PrepareContext(ctx, ` - SELECT tokens.ID,tokens.COLLECTORS_NOTE,token_medias.MEDIA as token_media,tokens.TOKEN_MEDIA_ID,tokens.TOKEN_TYPE,tokens.CHAIN,token_medias.NAME,token_medias.DESCRIPTION,tokens.TOKEN_ID,tokens.TOKEN_URI,tokens.QUANTITY,tokens.OWNER_USER_ID,tokens.OWNED_BY_WALLETS,tokens.OWNERSHIP_HISTORY,token_medias.METADATA as token_metadata,tokens.EXTERNAL_URL,tokens.BLOCK_NUMBER,tokens.VERSION,tokens.CREATED_AT,tokens.LAST_UPDATED,tokens.IS_USER_MARKED_SPAM,tokens.IS_PROVIDER_MARKED_SPAM,contracts.ID,contracts.DELETED,contracts.VERSION,contracts.CREATED_AT,contracts.LAST_UPDATED,contracts.NAME,contracts.SYMBOL,contracts.ADDRESS,contracts.CREATOR_ADDRESS,contracts.CHAIN,contracts.PROFILE_BANNER_URL,contracts.PROFILE_IMAGE_URL,contracts.BADGE_URL,contracts.DESCRIPTION,contracts.OWNER_ADDRESS,contracts.IS_PROVIDER_MARKED_SPAM,contracts.PARENT_ID,contracts.OVERRIDE_CREATOR_USER_ID - FROM tokens - LEFT JOIN token_medias ON token_medias.ID = tokens.TOKEN_MEDIA_ID - JOIN contracts ON tokens.contract = contracts.ID - WHERE tokens.TOKEN_ID = $1 AND CONTRACT = $2 AND DISPLAYABLE AND tokens.DELETED = false AND contracts.deleted = false - ORDER BY BLOCK_NUMBER DESC; - `) - checkNoErr(err) - - 
getByTokenIdentifiersPaginateStmt, err := db.PrepareContext(ctx, ` - SELECT tokens.ID,tokens.COLLECTORS_NOTE,token_medias.MEDIA as token_media,tokens.TOKEN_MEDIA_ID,tokens.TOKEN_TYPE,tokens.CHAIN,token_medias.NAME,token_medias.DESCRIPTION,tokens.TOKEN_ID,tokens.TOKEN_URI,tokens.QUANTITY,tokens.OWNER_USER_ID,tokens.OWNED_BY_WALLETS,tokens.OWNERSHIP_HISTORY,token_medias.METADATA as token_metadata,tokens.EXTERNAL_URL,tokens.BLOCK_NUMBER,tokens.VERSION,tokens.CREATED_AT,tokens.LAST_UPDATED,tokens.IS_USER_MARKED_SPAM,tokens.IS_PROVIDER_MARKED_SPAM,contracts.ID,contracts.DELETED,contracts.VERSION,contracts.CREATED_AT,contracts.LAST_UPDATED,contracts.NAME,contracts.SYMBOL,contracts.ADDRESS,contracts.CREATOR_ADDRESS,contracts.CHAIN,contracts.PROFILE_BANNER_URL,contracts.PROFILE_IMAGE_URL,contracts.BADGE_URL,contracts.DESCRIPTION,contracts.OWNER_ADDRESS,contracts.IS_PROVIDER_MARKED_SPAM,contracts.PARENT_ID,contracts.OVERRIDE_CREATOR_USER_ID - FROM tokens - LEFT JOIN token_medias ON token_medias.ID = tokens.TOKEN_MEDIA_ID - JOIN contracts ON tokens.contract = contracts.ID - WHERE tokens.TOKEN_ID = $1 AND CONTRACT = $2 AND DISPLAYABLE AND tokens.DELETED = false AND contracts.deleted = false - ORDER BY BLOCK_NUMBER DESC - LIMIT $3 OFFSET $4; - `) - checkNoErr(err) - - getContractByAddressStmt, err := db.PrepareContext(ctx, `SELECT ID FROM contracts WHERE ADDRESS = $1 AND CHAIN = $2 AND DELETED = false;`) - checkNoErr(err) - - return &TokenFullDetailsRepository{ - db: db, - queries: queries, - getByTokenIdentifiersStmt: getByTokenIdentifiersStmt, - getByTokenIdentifiersPaginateStmt: getByTokenIdentifiersPaginateStmt, - getContractByAddressStmt: getContractByAddressStmt, - } - +func NewTokenFullDetailsRepository(queries *db.Queries) *TokenFullDetailsRepository { + return &TokenFullDetailsRepository{queries: queries} } // GetByID gets a token by its DBID @@ -76,7 +33,7 @@ func (t *TokenFullDetailsRepository) GetByID(ctx context.Context, tokenID persis return TokenFullDetails{}, err } return TokenFullDetails{ - Token: r.Token, + Instance: r.Token, Contract: r.Contract, Definition: r.TokenDefinition, Media: r.TokenMedia, @@ -91,7 +48,7 @@ func (t *TokenFullDetailsRepository) GetByUserID(ctx context.Context, userID per } tokens := util.MapWithoutError(r, func(r db.GetTokenFullDetailsByUserIdRow) TokenFullDetails { return TokenFullDetails{ - Token: r.Token, + Instance: r.Token, Contract: r.Contract, Definition: r.TokenDefinition, Media: r.TokenMedia, @@ -100,44 +57,20 @@ func (t *TokenFullDetailsRepository) GetByUserID(ctx context.Context, userID per return tokens, nil } -// GetByTokenIdentifiers gets a token by its token ID and contract address and chain -func (t *TokenFullDetailsRepository) GetByTokenIdentifiers(ctx context.Context, tokenID persist.TokenID, contractAddress persist.Address, chain persist.Chain) ([]TokenFullDetails, error) { - var contractID persist.DBID - err := t.getContractByAddressStmt.QueryRowContext(ctx, pContractAddress, pChain).Scan(&contractID) +// GetByDefinitionID gets a token by its definition DBID +func (t *TokenFullDetailsRepository) GetByDefinitionID(ctx context.Context, definitionID persist.DBID) ([]TokenFullDetails, error) { + r, err := t.queries.GetTokenFullDetailsByTokenDefinitionId(ctx, definitionID) if err != nil { return nil, err } - - var rows *sql.Rows - if limit > 0 { - rows, err = t.getByTokenIdentifiersPaginateStmt.QueryContext(ctx, pTokenID, contractID, limit, page*limit) - } else { - rows, err = t.getByTokenIdentifiersStmt.QueryContext(ctx, pTokenID, contractID) 
- } - if err != nil { - return nil, err - } - defer rows.Close() - - tokens := make([]persist.TokenGallery, 0, 10) - for rows.Next() { - token := persist.TokenGallery{} - contract := persist.ContractGallery{} - if err := rows.Scan(&token.ID, &token.CollectorsNote, &token.TokenMedia, &token.TokenMediaID, &token.TokenType, &token.Chain, &token.Name, &token.Description, &token.TokenID, &token.TokenURI, &token.Quantity, &token.OwnerUserID, pq.Array(&token.OwnedByWallets), pq.Array(&token.OwnershipHistory), &token.TokenMetadata, &token.ExternalURL, &token.BlockNumber, &token.Version, &token.CreationTime, &token.LastUpdated, &token.IsUserMarkedSpam, &token.IsProviderMarkedSpam, &contract.ID, &contract.Deleted, &contract.Version, &contract.CreationTime, &contract.LastUpdated, &contract.Name, &contract.Symbol, &contract.Address, &contract.CreatorAddress, &contract.Chain, &contract.ProfileBannerURL, &contract.ProfileImageURL, &contract.BadgeURL, &contract.Description, &contract.OwnerAddress, &contract.IsProviderMarkedSpam, &contract.ParentID, &contract.OverrideCreatorUserID); err != nil { - return nil, err + tokens := util.MapWithoutError(r, func(r db.GetTokenFullDetailsByTokenDefinitionIdRow) TokenFullDetails { + return TokenFullDetails{ + Instance: r.Token, + Contract: r.Contract, + Definition: r.TokenDefinition, + Media: r.TokenMedia, } - token.Contract = contract - tokens = append(tokens, token) - } - - if err := rows.Err(); err != nil { - return nil, err - } - - if len(tokens) == 0 { - return nil, persist.ErrTokenGalleryNotFoundByIdentifiers{TokenID: pTokenID, ContractAddress: pContractAddress, Chain: pChain} - } - + }) return tokens, nil } @@ -149,7 +82,7 @@ func (t *TokenFullDetailsRepository) GetByContractID(ctx context.Context, contra } tokens := util.MapWithoutError(r, func(r db.GetTokenFullDetailsByContractIdRow) TokenFullDetails { return TokenFullDetails{ - Token: r.Token, + Instance: r.Token, Contract: r.Contract, Definition: r.TokenDefinition, Media: r.TokenMedia, @@ -212,6 +145,9 @@ func (t *TokenFullDetailsRepository) UpsertTokens(ctx context.Context, tokens [] var errors []error + // XXX: Add address to definition + panic("ADD ADDRESS") + for i := range definitions { d := &definitions[i] params.DefinitionDbid = append(params.DefinitionDbid, persist.GenerateID().String()) @@ -223,6 +159,7 @@ func (t *TokenFullDetailsRepository) UpsertTokens(ctx context.Context, tokens [] params.DefinitionChain = append(params.DefinitionChain, int32(d.Chain)) params.DefinitionIsProviderMarkedSpam = append(params.DefinitionIsProviderMarkedSpam, d.IsProviderMarkedSpam) appendJSONB(¶ms.DefinitionFallbackMedia, d.FallbackMedia, &errors) + appendJSONB(¶ms.DefinitionMetadata, d.Metadata, &errors) params.DefinitionContractID = append(params.DefinitionContractID, d.ContractID.String()) // Defer error checking until now to keep the code above from being // littered with multiline "if" statements @@ -258,7 +195,7 @@ func (t *TokenFullDetailsRepository) UpsertTokens(ctx context.Context, tokens [] upsertedTokens := util.MapWithoutError(upserted, func(r db.UpsertTokensRow) TokenFullDetails { return TokenFullDetails{ - Token: r.Token, + Instance: r.Token, Contract: r.Contract, Definition: r.TokenDefinition, Media: r.TokenMedia, diff --git a/service/persist/token_gallery.go b/service/persist/token_gallery.go index 81c500d7c..8c60f2e42 100644 --- a/service/persist/token_gallery.go +++ b/service/persist/token_gallery.go @@ -47,12 +47,6 @@ type ContractIdentifiers struct { Chain Chain `json:"chain"` } -type 
ErrTokenGalleryNotFoundByIdentifiers struct { - TokenID TokenID - ContractAddress Address - Chain Chain -} - // NewTokenIdentifiers creates a new token identifiers func NewTokenIdentifiers(pContractAddress Address, pTokenID TokenID, pChain Chain) TokenIdentifiers { return TokenIdentifiers{ @@ -135,7 +129,3 @@ func (a *AddressAtBlock) Scan(src interface{}) error { func (a AddressAtBlock) Value() (driver.Value, error) { return json.Marshal(a) } - -func (e ErrTokenGalleryNotFoundByIdentifiers) Error() string { - return fmt.Sprintf("token not found with contract address %v and token ID %v", e.ContractAddress, e.TokenID) -} diff --git a/sqlc.yaml b/sqlc.yaml index baab7d935..543d2a477 100644 --- a/sqlc.yaml +++ b/sqlc.yaml @@ -196,6 +196,8 @@ sql: go_type: 'github.com/mikeydub/go-gallery/service/persist.FallbackMedia' - column: 'token_definitions.token_type' go_type: 'github.com/mikeydub/go-gallery/service/persist.TokenType' + - column: 'token_definitions.metadata' + go_type: 'github.com/mikeydub/go-gallery/service/persist.TokenMetadata' # Wildcards # Note: to override one of these wildcard entries, add a more specific entry (like some_table.id) above. diff --git a/tokenprocessing/handler.go b/tokenprocessing/handler.go index 6177d74f1..15da3847a 100644 --- a/tokenprocessing/handler.go +++ b/tokenprocessing/handler.go @@ -32,10 +32,10 @@ func handlersInitServer(ctx context.Context, router *gin.Engine, tp *tokenProces if hub := sentryutil.SentryHubFromContext(c); hub != nil { hub.Scope().AddEventProcessor(sentryutil.SpanFilterEventProcessor(c, 1000, 1*time.Millisecond, 8, true)) } - processMediaForUsersTokens(tp, repos.TokenRepository, repos.ContractRepository, syncManager)(c) + processMediaForUsersTokens(tp, repos.TokenRepository, syncManager)(c) }) - mediaGroup.POST("/process/token", processMediaForTokenIdentifiers(tp, repos.TokenRepository, repos.ContractRepository, repos.UserRepository, repos.WalletRepository, refreshManager)) - mediaGroup.POST("/tokenmanage/process/token", processMediaForTokenManaged(tp, repos.TokenRepository, repos.ContractRepository, syncManager)) + mediaGroup.POST("/process/token", processMediaForTokenIdentifiers(tp, mc.Queries, refreshManager)) + mediaGroup.POST("/tokenmanage/process/token", processMediaForTokenManaged(tp, repos.TokenRepository, syncManager)) mediaGroup.POST("/process/post-preflight", processPostPreflight(tp, syncManager, mc.Queries, mc, repos.ContractRepository, repos.UserRepository, repos.TokenRepository)) ownersGroup := router.Group("/owners") ownersGroup.POST("/process/contract", processOwnersForContractTokens(mc, repos.ContractRepository, throttler)) @@ -50,9 +50,9 @@ func handlersInitServer(ctx context.Context, router *gin.Engine, tp *tokenProces func syncMaxRetriesF(ctx context.Context, q *db.Queries) func(id persist.DBID) int { return func(id persist.DBID) int { - contract, _ := q.GetContractByTokenDefinitionId(ctx, id) - contractIdentifiers := persist.NewContractIdentifiers(contract.Address, contract.Chain) - if retries, ok := contractSpecificRetries[contractIdentifiers]; ok { + td, _ := q.GetTokenDefinitionById(ctx, id) + cID := persist.NewContractIdentifiers(td.ContractAddress, td.Chain) + if retries, ok := contractSpecificRetries[cID]; ok { return retries } return defaultSyncMaxRetries diff --git a/tokenprocessing/pipeline.go b/tokenprocessing/pipeline.go index 35bc1c726..b44043dac 100644 --- a/tokenprocessing/pipeline.go +++ b/tokenprocessing/pipeline.go @@ -8,45 +8,40 @@ import ( "time" "cloud.google.com/go/storage" - 
"github.com/ethereum/go-ethereum/ethclient" "github.com/everFinance/goar" shell "github.com/ipfs/go-ipfs-api" - "github.com/mikeydub/go-gallery/db/gen/coredb" + "github.com/sirupsen/logrus" + + db "github.com/mikeydub/go-gallery/db/gen/coredb" "github.com/mikeydub/go-gallery/service/logger" "github.com/mikeydub/go-gallery/service/media" "github.com/mikeydub/go-gallery/service/metric" "github.com/mikeydub/go-gallery/service/multichain" "github.com/mikeydub/go-gallery/service/persist" - "github.com/mikeydub/go-gallery/service/persist/postgres" "github.com/mikeydub/go-gallery/service/tracing" "github.com/mikeydub/go-gallery/util" - "github.com/sirupsen/logrus" ) type tokenProcessor struct { - queries *coredb.Queries - ethClient *ethclient.Client + queries *db.Queries httpClient *http.Client mc *multichain.Provider ipfsClient *shell.Shell arweaveClient *goar.Client stg *storage.Client tokenBucket string - tokenRepo *postgres.TokenGalleryRepository mr metric.MetricReporter } -func NewTokenProcessor(queries *coredb.Queries, ethClient *ethclient.Client, httpClient *http.Client, mc *multichain.Provider, ipfsClient *shell.Shell, arweaveClient *goar.Client, stg *storage.Client, tokenBucket string, tokenRepo *postgres.TokenGalleryRepository, mr metric.MetricReporter) *tokenProcessor { +func NewTokenProcessor(queries *db.Queries, httpClient *http.Client, mc *multichain.Provider, ipfsClient *shell.Shell, arweaveClient *goar.Client, stg *storage.Client, tokenBucket string, mr metric.MetricReporter) *tokenProcessor { return &tokenProcessor{ queries: queries, - ethClient: ethClient, mc: mc, httpClient: httpClient, ipfsClient: ipfsClient, arweaveClient: arweaveClient, stg: stg, tokenBucket: tokenBucket, - tokenRepo: tokenRepo, mr: mr, } } @@ -55,7 +50,7 @@ type tokenProcessingJob struct { tp *tokenProcessor id persist.DBID token persist.TokenIdentifiers - contract persist.ContractGallery + contract persist.ContractIdentifiers cause persist.ProcessingCause pipelineMetadata *persist.PipelineMetadata // profileImageKey is an optional key in the metadata that the pipeline should also process as a profile image. @@ -64,10 +59,8 @@ type tokenProcessingJob struct { profileImageKey string // refreshMetadata is an optional flag that indicates that the pipeline should check for new metadata when enabled refreshMetadata bool - // tokenInstance is an already instanced token to derive data (name, description, and metadata) from when fetching fails. - // If the job doesn't produce active media, only tokenInstance's media is updated (as opposed to all instances of a token). - // If there is active media from past jobs, tokenInstance's media will reference that media instead. - tokenInstance *persist.TokenGallery + // tokenMetadata is metadata to use to retrieve media from. If not set or refreshMetadata is enabled, the pipeline will try to get new metadata. 
+ tokenMetadata persist.TokenMetadata } type PipelineOption func(*tokenProcessingJob) @@ -88,13 +81,13 @@ func (pOpts) WithRefreshMetadata() PipelineOption { } } -func (pOpts) WithTokenInstance(t *persist.TokenGallery) PipelineOption { +func (pOpts) WithMetadata(t persist.TokenMetadata) PipelineOption { return func(j *tokenProcessingJob) { - j.tokenInstance = t + j.tokenMetadata = t } } -func (tp *tokenProcessor) ProcessTokenPipeline(ctx context.Context, token persist.TokenIdentifiers, contract persist.ContractGallery, cause persist.ProcessingCause, opts ...PipelineOption) (coredb.TokenMedia, error) { +func (tp *tokenProcessor) ProcessTokenPipeline(ctx context.Context, token persist.TokenIdentifiers, contract persist.ContractIdentifiers, cause persist.ProcessingCause, opts ...PipelineOption) (db.TokenMedia, error) { runID := persist.GenerateID() ctx = logger.NewContextWithFields(ctx, logrus.Fields{"runID": runID}) @@ -113,8 +106,8 @@ func (tp *tokenProcessor) ProcessTokenPipeline(ctx context.Context, token persis } startTime := time.Now() - media, err := job.run(ctx) - recordPipelineEndState(ctx, tp.mr, media, err, time.Since(startTime), cause) + media, err := job.Run(ctx) + recordPipelineEndState(ctx, tp.mr, job.token.Chain, media, err, time.Since(startTime), cause) if err != nil { reportJobError(ctx, err, *job) @@ -136,7 +129,7 @@ func (e ErrBadToken) Unwrap() error { return e.Err } -func (tpj *tokenProcessingJob) run(ctx context.Context) (coredb.TokenMedia, error) { +func (tpj *tokenProcessingJob) Run(ctx context.Context) (db.TokenMedia, error) { span, ctx := tracing.StartSpan(ctx, "pipeline.run", fmt.Sprintf("run %s", tpj.id)) defer tracing.FinishSpan(span) @@ -147,42 +140,38 @@ func (tpj *tokenProcessingJob) run(ctx context.Context) (coredb.TokenMedia, erro media, mediaErr := tpj.createMediaForToken(mediaCtx) - if err := tpj.persistResults(ctx, media); err != nil { + err := tpj.persistResults(ctx, media) + if err != nil { return media, err } return media, mediaErr } -func (tpj *tokenProcessingJob) createMediaForToken(ctx context.Context) (coredb.TokenMedia, error) { +func (tpj *tokenProcessingJob) createMediaForToken(ctx context.Context) (db.TokenMedia, error) { traceCallback, ctx := persist.TrackStepStatus(ctx, &tpj.pipelineMetadata.CreateMedia, "CreateMedia") defer traceCallback() - result := coredb.TokenMedia{ - ID: persist.GenerateID(), - ContractID: tpj.contract.ID, - TokenID: tpj.token.TokenID, - Chain: tpj.token.Chain, - Active: true, - ProcessingJobID: tpj.id, + if tpj.tokenMetadata == nil || tpj.refreshMetadata { + tpj.addMetadata(ctx) } - result.Metadata = tpj.retrieveMetadata(ctx) - - result.Name, result.Description = tpj.retrieveTokenInfo(ctx, result.Metadata) - - tokenMedia, err := tpj.cacheMediaObjects(ctx, result.Metadata) - result.Media = tokenMedia + tokenMedia, err := tpj.cacheMediaObjects(ctx, tpj.tokenMetadata) // Wrap the error to indicate that the token is bad to callers if errors.Is(err, media.ErrNoMediaURLs) || util.ErrorAs[errInvalidMedia](err) { err = ErrBadToken{err} } - return result, err + return db.TokenMedia{ + ID: persist.GenerateID(), + Active: true, + ProcessingJobID: tpj.id, + Media: tokenMedia, + }, err } -func (tpj *tokenProcessingJob) retrieveMetadata(ctx context.Context) persist.TokenMetadata { +func (tpj *tokenProcessingJob) addMetadata(ctx context.Context) { traceCallback, ctx := persist.TrackStepStatus(ctx, &tpj.pipelineMetadata.MetadataRetrieval, "MetadataRetrieval") defer traceCallback() @@ -190,59 +179,34 @@ func (tpj *tokenProcessingJob) 
retrieveMetadata(ctx context.Context) persist.Tok
     ctx, cancel := context.WithTimeout(ctx, time.Minute)
     defer cancel()
-    var newMetadata persist.TokenMetadata
-
-    if tpj.tokenInstance != nil {
-        newMetadata = tpj.tokenInstance.TokenMetadata
-    }
-
-    if len(newMetadata) == 0 || tpj.refreshMetadata {
-        i, a := tpj.contract.Chain.BaseKeywords()
-        fieldRequests := []multichain.FieldRequest[string]{
-            {
-                FieldNames: append(i, a...),
-                Level: multichain.FieldRequirementLevelOneRequired,
-            },
-            {
-                FieldNames: []string{"name", "description"},
-                Level: multichain.FieldRequirementLevelAllOptional,
-            },
-        }
-        mcMetadata, err := tpj.tp.mc.GetTokenMetadataByTokenIdentifiers(ctx, tpj.contract.Address, tpj.token.TokenID, tpj.token.Chain, fieldRequests)
-        if err != nil {
-            logger.For(ctx).Warnf("error getting metadata from chain: %s", err)
-            persist.FailStep(&tpj.pipelineMetadata.MetadataRetrieval)
-        } else if len(mcMetadata) > 0 {
-            logger.For(ctx).Infof("got metadata from chain: %v", mcMetadata)
-            newMetadata = mcMetadata
-        }
+    i, a := tpj.contract.Chain.BaseKeywords()
+    fieldRequests := []multichain.FieldRequest[string]{
+        {
+            FieldNames: append(i, a...),
+            Level: multichain.FieldRequirementLevelOneRequired,
+        },
+        {
+            FieldNames: []string{"name", "description"},
+            Level: multichain.FieldRequirementLevelAllOptional,
+        },
     }
-    if len(newMetadata) == 0 {
+    mcMetadata, err := tpj.tp.mc.GetTokenMetadataByTokenIdentifiers(ctx, tpj.contract.ContractAddress, tpj.token.TokenID, tpj.token.Chain, fieldRequests)
+    if err != nil {
+        logger.For(ctx).Warnf("error getting metadata from chain: %s", err)
         persist.FailStep(&tpj.pipelineMetadata.MetadataRetrieval)
+    } else if len(mcMetadata) > 0 {
+        logger.For(ctx).Infof("got metadata from chain: %v", mcMetadata)
+        tpj.tokenMetadata = mcMetadata
     }
-    return newMetadata
-}
-
-func (tpj *tokenProcessingJob) retrieveTokenInfo(ctx context.Context, metadata persist.TokenMetadata) (string, string) {
-    traceCallback, ctx := persist.TrackStepStatus(ctx, &tpj.pipelineMetadata.TokenInfoRetrieval, "TokenInfoRetrieval")
-    defer traceCallback()
-
-    name, description := findNameAndDescription(ctx, metadata)
-
-    if name == "" && tpj.tokenInstance != nil {
-        name = tpj.tokenInstance.Name.String()
-    }
-
-    if description == "" && tpj.tokenInstance != nil {
-        description = tpj.tokenInstance.Description.String()
+    if len(tpj.tokenMetadata) == 0 {
+        persist.FailStep(&tpj.pipelineMetadata.MetadataRetrieval)
     }
-    return name, description
 }
 
 func (tpj *tokenProcessingJob) cacheMediaObjects(ctx context.Context, metadata persist.TokenMetadata) (persist.Media, error) {
-    imgURL, animURL, err := findImageAndAnimationURLs(ctx, tpj.contract.Address, tpj.token.Chain, metadata, tpj.pipelineMetadata)
+    imgURL, animURL, err := findImageAndAnimationURLs(ctx, tpj.contract.ContractAddress, tpj.token.Chain, metadata, tpj.pipelineMetadata)
     if err != nil {
         return persist.Media{MediaType: persist.MediaTypeUnknown}, err
     }
@@ -349,7 +313,7 @@ func (tpj *tokenProcessingJob) createRawMedia(ctx context.Context, mediaType per
     traceCallback, ctx := persist.TrackStepStatus(ctx, &tpj.pipelineMetadata.CreateRawMedia, "CreateRawMedia")
     defer traceCallback()
 
-    return createRawMedia(ctx, persist.NewTokenIdentifiers(tpj.contract.Address, tpj.token.TokenID, tpj.token.Chain), mediaType, tpj.tp.tokenBucket, animURL, imgURL, objects)
+    return createRawMedia(ctx, persist.NewTokenIdentifiers(tpj.contract.ContractAddress, tpj.token.TokenID, tpj.token.Chain), mediaType, tpj.tp.tokenBucket, animURL, imgURL, objects)
 }
 
 func (tpj *tokenProcessingJob) isNewMediaPreferable(ctx context.Context, media persist.Media) bool {
@@ -358,7 +322,7 @@ func (tpj *tokenProcessingJob) isNewMediaPreferable(ctx context.Context, media p
     return media.IsServable()
 }
 
-func (tpj *tokenProcessingJob) persistResults(ctx context.Context, tmetadata coredb.TokenMedia) error {
+func (tpj *tokenProcessingJob) persistResults(ctx context.Context, tmetadata db.TokenMedia) error {
     if !tpj.isNewMediaPreferable(ctx, tmetadata.Media) {
         tmetadata.Active = false
     }
@@ -367,7 +331,7 @@ func (tpj *tokenProcessingJob) persistResults(ctx context.Context, tmetadata cor
 
 }
 
-func (tpj *tokenProcessingJob) upsertDB(ctx context.Context, tmetadata coredb.TokenMedia) error {
+func (tpj *tokenProcessingJob) upsertDB(ctx context.Context, tmetadata db.TokenMedia) error {
     p := persist.TokenProperties{
         HasMetadata: tmetadata.Metadata != nil && len(tmetadata.Metadata) > 0,
         HasPrimaryMedia: tmetadata.Media.MediaType.IsValid() && tmetadata.Media.MediaURL != "",
@@ -384,7 +348,7 @@ func (tpj *tokenProcessingJob) upsertDB(ctx context.Context, tmetadata coredb.To
         tokenDBID = tpj.tokenInstance.ID
     }
 
-    return tpj.tp.queries.InsertTokenPipelineResults(ctx, coredb.InsertTokenPipelineResultsParams{
+    return tpj.tp.queries.InsertTokenPipelineResults(ctx, db.InsertTokenPipelineResultsParams{
         Chain: tpj.token.Chain,
         ContractID: tpj.contract.ID,
         TokenID: tpj.token.TokenID,
@@ -428,9 +392,9 @@ func pipelineErroredMetric() metric.Measure {
     return metric.Measure{Name: metricPipelineErrored}
 }
 
-func recordPipelineEndState(ctx context.Context, mr metric.MetricReporter, tokenMedia coredb.TokenMedia, err error, d time.Duration, cause persist.ProcessingCause) {
+func recordPipelineEndState(ctx context.Context, mr metric.MetricReporter, chain persist.Chain, tokenMedia db.TokenMedia, err error, d time.Duration, cause persist.ProcessingCause) {
     baseOpts := append([]any{}, metric.LogOptions.WithTags(map[string]string{
-        "chain": fmt.Sprintf("%d", tokenMedia.Chain),
+        "chain": fmt.Sprintf("%d", chain),
         "mediaType": tokenMedia.Media.MediaType.String(),
         "cause": cause.String(),
         "isBadToken": fmt.Sprintf("%t", isBadTokenErr(err)),
diff --git a/tokenprocessing/process.go b/tokenprocessing/process.go
index ca44157f7..3a0a5a66a 100644
--- a/tokenprocessing/process.go
+++ b/tokenprocessing/process.go
@@ -16,7 +16,7 @@ import (
     "github.com/sirupsen/logrus"
     "github.com/sourcegraph/conc/pool"
 
-    "github.com/mikeydub/go-gallery/db/gen/coredb"
+    db "github.com/mikeydub/go-gallery/db/gen/coredb"
     "github.com/mikeydub/go-gallery/env"
     "github.com/mikeydub/go-gallery/event"
     "github.com/mikeydub/go-gallery/service/eth"
@@ -38,7 +38,7 @@ type ProcessMediaForTokenInput struct {
     Chain persist.Chain `json:"chain"`
 }
 
-func processMediaForUsersTokens(tp *tokenProcessor, tokenRepo *postgres.TokenGalleryRepository, contractRepo *postgres.ContractGalleryRepository, tm *tokenmanage.Manager) gin.HandlerFunc {
+func processMediaForUsersTokens(tp *tokenProcessor, tokenDetailsRepo *postgres.TokenFullDetailsRepository, tm *tokenmanage.Manager) gin.HandlerFunc {
     return func(c *gin.Context) {
         var input task.TokenProcessingUserMessage
         if err := c.ShouldBindJSON(&input); err != nil {
@@ -56,18 +56,12 @@ func processMediaForUsersTokens(tp *tokenProcessor, tokenRepo *postgres.TokenGal
             tokenID := tokenID
             wp.Go(func() error {
-                token, err := tokenRepo.GetByID(reqCtx, tokenID)
+                td, err := tokenDetailsRepo.GetByID(reqCtx, tokenID)
                 if err != nil {
                     return err
                 }
-
-                contract, err := contractRepo.GetByID(reqCtx, token.Contract.ID)
-                if err != nil {
-                    logger.For(reqCtx).Errorf("error getting contract: %s", err)
-                }
-
                 ctx := sentryutil.NewSentryHubContext(reqCtx)
-                _, err = processFromInstanceManaged(ctx, tp, tm, token, contract, persist.ProcessingCauseSync, 0)
+                _, err = processFromInstanceManaged(ctx, tp, tm, td, persist.ProcessingCauseSync, 0)
                 return err
             })
         }
@@ -79,7 +73,7 @@ func processMediaForUsersTokens(tp *tokenProcessor, tokenRepo *postgres.TokenGal
     }
 }
 
-func processMediaForTokenIdentifiers(tp *tokenProcessor, queries *coredb.Queries, tm *tokenmanage.Manager) gin.HandlerFunc {
+func processMediaForTokenIdentifiers(tp *tokenProcessor, queries *db.Queries, tm *tokenmanage.Manager) gin.HandlerFunc {
     return func(c *gin.Context) {
         var input ProcessMediaForTokenInput
         if err := c.ShouldBindJSON(&input); err != nil {
@@ -87,48 +81,23 @@ func processMediaForTokenIdentifiers(tp *tokenProcessor, queries *coredb.Queries
             return
         }
 
-        r, err := queries.GetTokenDefinitionAndContractByTokenIdentifiers(c, coredb.GetTokenDefinitionAndContractByTokenIdentifiersParams{
+        tokenDef, err := queries.GetTokenDefinitionByTokenIdentifiers(c, db.GetTokenDefinitionByTokenIdentifiersParams{
             Chain: input.Chain,
             ContractAddress: input.ContractAddress,
             TokenID: input.TokenID,
         })
+        if err == pgx.ErrNoRows {
+            util.ErrResponse(c, http.StatusNotFound, err)
+            return
+        }
         if err != nil {
             util.ErrResponse(c, http.StatusInternalServerError, err)
             return
         }
 
-        // token, err := tokenRepo.GetByTokenIdentifiers(c, input.TokenID, input.ContractAddress, input.Chain)
-        // if err != nil {
-        //     if util.ErrorAs[persist.ErrTokenGalleryNotFoundByIdentifiers](err) {
-        //         util.ErrResponse(c, http.StatusNotFound, err)
-        //         return
-        //     }
-        //     util.ErrResponse(c, http.StatusInternalServerError, err)
-        //     return
-        // }
-
-        // if len(tokens) == 0 {
-        //     util.ErrResponse(c, http.StatusNotFound, persist.ErrTokenGalleryNotFoundByIdentifiers{
-        //         TokenID: input.TokenID,
-        //         ContractAddress: input.ContractAddress,
-        //         Chain: input.Chain,
-        //     })
-        //     return
-        // }
-
-        // token = tokens[0]
-
-        // contract, err := contractRepo.GetByID(c, token.Contract.ID)
-        // if err != nil {
-        //     if util.ErrorAs[persist.ErrContractNotFoundByID](err) {
-        //         util.ErrResponse(c, http.StatusNotFound, err)
-        //         return
-        //     }
-        //     util.ErrResponse(c, http.StatusInternalServerError, err)
-        //     return
-        // }
-
-        _, err = processFromInstanceManaged(c, tp, tm, r.TokenDefinition, r.Contract, persist.ProcessingCauseRefresh, 0)
+        cID := persist.NewContractIdentifiers(input.ContractAddress, input.Chain)
+
+        _, err = processFromTokenDefinitionManaged(c, tp, tm, tokenDef, cID, persist.ProcessingCauseRefresh, 0)
 
         if err != nil {
             if util.ErrorAs[ErrBadToken](err) {
@@ -144,7 +113,7 @@ func processMediaForTokenIdentifiers(tp *tokenProcessor, queries *coredb.Queries
 }
 
 // processMediaForTokenManaged processes a single token instance. It's only called for tokens that failed during a sync.
-func processMediaForTokenManaged(tp *tokenProcessor, tokenRepo *postgres.TokenGalleryRepository, contractRepo *postgres.ContractGalleryRepository, tm *tokenmanage.Manager) gin.HandlerFunc {
+func processMediaForTokenManaged(tp *tokenProcessor, tokenDetailsRepo *postgres.TokenFullDetailsRepository, tm *tokenmanage.Manager) gin.HandlerFunc {
     return func(c *gin.Context) {
         var input task.TokenProcessingTokenMessage
@@ -154,13 +123,20 @@ func processMediaForTokenManaged(tp *tokenProcessor, tokenRepo *postgres.TokenGa
             return
         }
 
-        contract, err := contractRepo.GetByAddress(c, input.Token.ContractAddress, input.Token.Chain)
-        if err != nil {
+        tDefs, err := tokenDetailsRepo.GetByDefinitionID(c, input.TokenDefinitionID)
+        if err == pgx.ErrNoRows {
             util.ErrResponse(c, http.StatusNotFound, err)
             return
         }
+        if err != nil {
+            util.ErrResponse(c, http.StatusInternalServerError, err)
+            return
+        }
+
+        cID := persist.NewContractIdentifiers(tDefs[0].Contract.Address, tDefs[0].Contract.Chain)
 
-        if _, err = processFromIdentifiersManaged(c, tp, tm, input.Token, contract, persist.ProcessingCauseSyncRetry, input.Attempts); err != nil {
+        _, err = processFromTokenDefinitionManaged(c, tp, tm, tDefs[0].Definition, cID, persist.ProcessingCauseSyncRetry, input.Attempts)
+        if err != nil {
             // Only log the error, because tokenmanage will handle reprocessing
             logger.For(c).Errorf("error processing token: %s", err)
         }
@@ -204,7 +180,7 @@ func processOwnersForContractTokens(mc *multichain.Provider, contractRepo *postg
     }
 }
 
-func processOwnersForUserTokens(mc *multichain.Provider, queries *coredb.Queries) gin.HandlerFunc {
+func processOwnersForUserTokens(mc *multichain.Provider, queries *db.Queries) gin.HandlerFunc {
     return func(c *gin.Context) {
         var input task.TokenProcessingUserTokensMessage
         if err := c.ShouldBindJSON(&input); err != nil {
@@ -223,7 +199,7 @@ func processOwnersForUserTokens(mc *multichain.Provider, queries *coredb.Queries
         for _, token := range newTokens {
             var curTotal persist.HexString
 
-            dbUniqueTokenIDs, err := queries.GetUniqueTokenIdentifiersByTokenID(c, token.ID)
+            dbUniqueTokenIDs, err := queries.GetUniqueTokenIdentifiersByTokenID(c, token.Instance.ID)
             if err != nil {
                 logger.For(c).Errorf("error getting unique token identifiers: %s", err)
                 continue
@@ -238,22 +214,22 @@ func processOwnersForUserTokens(mc *multichain.Provider, queries *coredb.Queries
             }
 
             // verify the total is less than or equal to the total in the db
-            if curTotal.BigInt().Cmp(token.Quantity.BigInt()) > 0 {
+            if curTotal.BigInt().Cmp(token.Instance.Quantity.BigInt()) > 0 {
                 logger.For(c).Errorf("error: total quantity of tokens in db is greater than total quantity of tokens on chain")
                 continue
             }
 
             // one event per token identifier (grouping ERC-1155s)
-            err = event.Dispatch(c, coredb.Event{
+            err = event.Dispatch(c, db.Event{
                 ID: persist.GenerateID(),
                 ActorID: persist.DBIDToNullStr(input.UserID),
                 ResourceTypeID: persist.ResourceTypeToken,
-                SubjectID: token.ID,
+                SubjectID: token.Instance.ID,
                 UserID: input.UserID,
-                TokenID: token.ID,
+                TokenID: token.Instance.ID,
                 Action: persist.ActionNewTokensReceived,
                 Data: persist.EventData{
-                    NewTokenID: token.ID,
+                    NewTokenID: token.Instance.ID,
                     NewTokenQuantity: curTotal,
                 },
             })
@@ -348,7 +324,7 @@ var alchemyIPs = []string{
     "34.237.24.169",
 }
 
-func processOwnersForAlchemyTokens(mc *multichain.Provider, queries *coredb.Queries) gin.HandlerFunc {
+func processOwnersForAlchemyTokens(mc *multichain.Provider, queries *db.Queries) gin.HandlerFunc {
     return func(c *gin.Context) {
         if !util.Contains(alchemyIPs, c.ClientIP()) {
@@ -410,7 +386,7 @@ func processOwnersForAlchemyTokens(mc *multichain.Provider, queries *coredb.Quer
             userID, ok := addressToUsers[persist.NewChainAddress(activity.ToAddress, chain)]
             if !ok {
-                user, err := queries.GetUserByAddressAndChains(c, coredb.GetUserByAddressAndChainsParams{
+                user, err := queries.GetUserByAddressAndChains(c, db.GetUserByAddressAndChainsParams{
                     Address: persist.Address(chain.NormalizeAddress(activity.ToAddress)),
                     Chains: util.MapWithoutError(persist.EvmChains, func(c persist.Chain) int32 { return int32(c) }),
                 })
@@ -491,7 +467,7 @@ func processOwnersForAlchemyTokens(mc *multichain.Provider, queries *coredb.Quer
         for _, token := range newTokens {
             var curTotal persist.HexString
 
-            dbUniqueTokenIDs, err := queries.GetUniqueTokenIdentifiersByTokenID(c, token.ID)
+            dbUniqueTokenIDs, err := queries.GetUniqueTokenIdentifiersByTokenID(c, token.Instance.ID)
             if err != nil {
                 logger.For(c).Errorf("error getting unique token identifiers: %s", err)
                 continue
@@ -506,22 +482,22 @@ func processOwnersForAlchemyTokens(mc *multichain.Provider, queries *coredb.Quer
             }
 
             // verify the total is less than or equal to the total in the db
-            if curTotal.BigInt().Cmp(token.Quantity.BigInt()) > 0 {
+            if curTotal.BigInt().Cmp(token.Instance.Quantity.BigInt()) > 0 {
                 l.Errorf("error: total quantity of tokens in db is greater than total quantity of tokens on chain")
                 continue
             }
 
             // one event per token identifier (grouping ERC-1155s)
-            err = event.Dispatch(c, coredb.Event{
+            err = event.Dispatch(c, db.Event{
                 ID: persist.GenerateID(),
                 ActorID: persist.DBIDToNullStr(userID),
                 ResourceTypeID: persist.ResourceTypeToken,
-                SubjectID: token.ID,
+                SubjectID: token.Instance.ID,
                 UserID: userID,
-                TokenID: token.ID,
+                TokenID: token.Instance.ID,
                 Action: persist.ActionNewTokensReceived,
                 Data: persist.EventData{
-                    NewTokenID: token.ID,
+                    NewTokenID: token.Instance.ID,
                     NewTokenQuantity: curTotal,
                 },
             })
@@ -559,9 +535,9 @@ func isValidAlchemySignatureForStringBody(
 }
 
 // detectSpamContracts refreshes the alchemy_spam_contracts table with marked contracts from Alchemy
-func detectSpamContracts(queries *coredb.Queries) gin.HandlerFunc {
+func detectSpamContracts(queries *db.Queries) gin.HandlerFunc {
     return func(c *gin.Context) {
-        var params coredb.InsertSpamContractsParams
+        var params db.InsertSpamContractsParams
 
         now := time.Now()
@@ -640,7 +616,7 @@ func detectSpamContracts(queries *coredb.Queries) gin.HandlerFunc {
     }
 }
 
-func processWalletRemoval(queries *coredb.Queries) gin.HandlerFunc {
+func processWalletRemoval(queries *db.Queries) gin.HandlerFunc {
     return func(c *gin.Context) {
         var input task.TokenProcessingWalletRemovalMessage
         if err := c.ShouldBindJSON(&input); err != nil {
@@ -654,7 +630,7 @@ func processWalletRemoval(queries *coredb.Queries) gin.HandlerFunc {
         // processing multiple wallet removals, we'll just process them in a loop here, because tuning the
         // underlying query to handle multiple wallet removals at a time is difficult.
         for _, walletID := range input.WalletIDs {
-            err := queries.RemoveWalletFromTokens(c, coredb.RemoveWalletFromTokensParams{
+            err := queries.RemoveWalletFromTokens(c, db.RemoveWalletFromTokensParams{
                 WalletID: walletID.String(),
                 UserID: input.UserID,
             })
@@ -674,7 +650,7 @@ func processWalletRemoval(queries *coredb.Queries) gin.HandlerFunc {
     }
 }
 
-func processPostPreflight(tp *tokenProcessor, tm *tokenmanage.Manager, q *coredb.Queries, mc *multichain.Provider, contractRepo *postgres.ContractGalleryRepository, userRepo *postgres.UserRepository, tokenRepo *postgres.TokenGalleryRepository) gin.HandlerFunc {
+func processPostPreflight(tp *tokenProcessor, tm *tokenmanage.Manager, q *db.Queries, mc *multichain.Provider, contractRepo *postgres.ContractGalleryRepository, userRepo *postgres.UserRepository, tokenRepo *postgres.TokenGalleryRepository) gin.HandlerFunc {
     return func(c *gin.Context) {
         var input task.PostPreflightMessage
 
@@ -684,7 +660,7 @@ func processPostPreflight(tp *tokenProcessor, tm *tokenmanage.Manager, q *coredb
             return
         }
 
-        existingMedia, err := q.GetMediaByUserTokenIdentifiers(c, coredb.GetMediaByUserTokenIdentifiersParams{
+        existingMedia, err := q.GetMediaByUserTokenIdentifiers(c, db.GetMediaByUserTokenIdentifiersParams{
            UserID: input.UserID,
            Chain: input.Token.Chain,
            Address: input.Token.ContractAddress,
@@ -743,33 +719,31 @@ func processPostPreflight(tp *tokenProcessor, tm *tokenmanage.Manager, q *coredb
     }
 }
 
-func processTokenManaged(ctx context.Context, tp *tokenProcessor, tm *tokenmanage.Manager, td coredb.TokenDefinition, c coredb.Contract) {
-    ctx = logger.NewContextWithFields(ctx, logrus.Fields{"tokenDefinitionDBID": td.ID})
-}
-
-func processFromInstanceManaged(ctx context.Context, tp *tokenProcessor, tm *tokenmanage.Manager, token persist.TokenGallery, contract persist.ContractGallery, cause persist.ProcessingCause, attempts int) (coredb.TokenMedia, error) {
-    ctx = logger.NewContextWithFields(ctx, logrus.Fields{"tokenDBID": token.ID})
-    t := persist.NewTokenIdentifiers(contract.Address, token.TokenID, token.Chain)
-    media, err := processFromIdentifiersManaged(ctx, tp, tm, t, contract, cause, attempts, PipelineOpts.WithTokenInstance(&token))
-    return media, err
+func processFromInstanceManaged(ctx context.Context, tp *tokenProcessor, tm *tokenmanage.Manager, t postgres.TokenFullDetails, cause persist.ProcessingCause, attempts int, opts ...PipelineOption) (db.TokenMedia, error) {
+    ctx = logger.NewContextWithFields(ctx, logrus.Fields{"tokenDBID": t.Instance.ID})
+    cID := persist.NewContractIdentifiers(t.Contract.Address, t.Contract.Chain)
+    return processFromTokenDefinitionManaged(ctx, tp, tm, t.Definition, cID, cause, attempts, opts...)
 }
 
-func processFromIdentifiersManaged(ctx context.Context, tp *tokenProcessor, tm *tokenmanage.Manager, token persist.TokenIdentifiers, contract persist.ContractGallery, cause persist.ProcessingCause, attempts int, opts ...PipelineOption) (coredb.TokenMedia, error) {
+func processFromTokenDefinitionManaged(ctx context.Context, tp *tokenProcessor, tm *tokenmanage.Manager, td db.TokenDefinition, cID persist.ContractIdentifiers, cause persist.ProcessingCause, attempts int, opts ...PipelineOption) (db.TokenMedia, error) {
     ctx = logger.NewContextWithFields(ctx, logrus.Fields{
-        "tokenID": token.TokenID,
-        "tokenID_base10": token.TokenID.Base10String(),
-        "contractDBID": contract.ID,
-        "contractAddress": contract.Address,
-        "chain": token.Chain,
+        "tokenDefinitionDBID": td.ID,
+        "contractDBID": td.ContractID,
+        "tokenID": td.TokenID,
+        "tokenID_base10": td.TokenID.Base10String(),
+        "contractAddress": cID.ContractAddress,
+        "chain": cID.Chain,
     })
 
-    closing, err := tm.StartProcessing(ctx, token, attempts)
+    closing, err := tm.StartProcessing(ctx, td.ID, attempts)
     if err != nil {
-        return coredb.TokenMedia{}, err
+        return db.TokenMedia{}, err
     }
 
-    runOpts := append([]PipelineOption{}, addContractRunOptions(contract)...)
+    runOpts := append([]PipelineOption{}, addContractRunOptions(cID)...)
     runOpts = append(runOpts, addContextRunOptions(cause)...)
+    runOpts = append(runOpts, PipelineOpts.WithMetadata(td.Metadata))
     runOpts = append(runOpts, opts...)
-    media, err := tp.ProcessTokenPipeline(ctx, token, contract, cause, runOpts...)
+    tID := persist.NewTokenIdentifiers(cID.ContractAddress, td.TokenID, td.Chain)
+    media, err := tp.ProcessTokenPipeline(ctx, tID, cID, cause, runOpts...)
     defer closing(err)
     return media, err
 }
@@ -783,8 +757,8 @@ func addContextRunOptions(cause persist.ProcessingCause) (opts []PipelineOption)
 }
 
 // addContractRunOptions adds pipeline options for specific contracts
-func addContractRunOptions(contract persist.ContractGallery) (opts []PipelineOption) {
-    if contract.Address == eth.EnsAddress {
+func addContractRunOptions(contract persist.ContractIdentifiers) (opts []PipelineOption) {
+    if contract.Chain == persist.ChainETH && contract.ContractAddress == eth.EnsAddress {
         opts = append(opts, PipelineOpts.WithProfileImageKey("profile_image"))
     }
     return opts