From 7ed07254918ba961b5d08c12674939eefaeb9d23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Lewandowski?= <35259896+pawellewandowski98@users.noreply.github.com> Date: Fri, 12 Jan 2024 13:29:20 +0100 Subject: [PATCH 01/13] feat(BUX-417): remove monitor, ITC flag and IncomingTransaction (#532) * chore(BUX-417): remove monitor * chore(BUX-417): remove ITC flag * chore(BUX-417): remove incoming tx --- action_destination.go | 18 +- action_destination_test.go | 24 +- action_transaction.go | 8 +- bux_suite_test.go | 4 +- chainstate/chainstate.go | 5 - chainstate/client.go | 11 - chainstate/client_options.go | 19 - chainstate/definitions.go | 1 - chainstate/errors.go | 3 - chainstate/interface.go | 55 --- chainstate/monitor.go | 238 ----------- chainstate/monitor_client.go | 100 ----- client.go | 32 -- client_internal.go | 57 --- client_options.go | 10 - client_options_test.go | 118 +++--- cron_job_declarations.go | 5 - cron_job_definitions.go | 11 - definitions.go | 66 ++- examples/client/custom_cron/custom_cron.go | 4 +- interface.go | 5 +- locks.go | 2 - mock_chainstate_test.go | 4 - model_destinations.go | 18 +- model_destinations_test.go | 21 +- model_incoming_transactions_test.go | 72 ---- model_transaction_config_test.go | 2 +- model_transactions.go | 15 - model_transactions_test.go | 11 +- models_test.go | 9 +- monitor.go | 103 ----- monitor_event_handler.go | 460 --------------------- paymail_service_provider.go | 20 +- record_tx_strategy_external_incoming_tx.go | 31 -- 34 files changed, 136 insertions(+), 1426 deletions(-) delete mode 100644 chainstate/monitor.go delete mode 100644 chainstate/monitor_client.go delete mode 100644 model_incoming_transactions_test.go delete mode 100644 monitor.go delete mode 100644 monitor_event_handler.go diff --git a/action_destination.go b/action_destination.go index ee2d14a3..4c656dda 100644 --- a/action_destination.go +++ b/action_destination.go @@ -2,18 +2,14 @@ package bux import ( "context" - "database/sql" - "time" 
- "github.com/mrz1836/go-datastore" - customTypes "github.com/mrz1836/go-datastore/custom_types" ) // NewDestination will get a new destination for an existing xPub // // xPubKey is the raw public xPub func (c *Client) NewDestination(ctx context.Context, xPubKey string, chain uint32, - destinationType string, monitor bool, opts ...ModelOps) (*Destination, error) { + destinationType string, opts ...ModelOps) (*Destination, error) { // Check for existing NewRelic transaction ctx = c.GetOrStartTxn(ctx, "new_destination") @@ -39,13 +35,6 @@ func (c *Client) NewDestination(ctx context.Context, xPubKey string, chain uint3 return nil, err } - if monitor { - destination.Monitor = customTypes.NullTime{NullTime: sql.NullTime{ - Valid: true, - Time: time.Now(), - }} - } - // Save the destination if err = destination.Save(ctx); err != nil { return nil, err @@ -57,7 +46,7 @@ func (c *Client) NewDestination(ctx context.Context, xPubKey string, chain uint3 // NewDestinationForLockingScript will create a new destination based on a locking script func (c *Client) NewDestinationForLockingScript(ctx context.Context, xPubID, lockingScript string, - monitor bool, opts ...ModelOps) (*Destination, error) { + opts ...ModelOps) (*Destination, error) { // Check for existing NewRelic transaction ctx = c.GetOrStartTxn(ctx, "new_destination_for_locking_script") @@ -77,6 +66,7 @@ func (c *Client) NewDestinationForLockingScript(ctx context.Context, xPubID, loc return nil, ErrUnknownLockingScript } +<<<<<<< HEAD // set the monitoring, passed down from the initiating function // this will be set when calling NewDestination from http, but not for instance paymail if monitor { @@ -86,6 +76,8 @@ func (c *Client) NewDestinationForLockingScript(ctx context.Context, xPubID, loc }} } +======= +>>>>>>> 06feaba (feat(BUX-417): remove monitor, ITC flag and IncomingTransaction (#532)) // Save the destination if err := destination.Save(ctx); err != nil { return nil, err diff --git a/action_destination_test.go 
b/action_destination_test.go index 72483cab..9814c01f 100644 --- a/action_destination_test.go +++ b/action_destination_test.go @@ -30,7 +30,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_NewDestination() { var destination *Destination destination, err = tc.client.NewDestination( - ctx, testXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, opts..., + ctx, testXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) assert.NoError(t, err) assert.Equal(t, "fc1e635d98151c6008f29908ee2928c60c745266f9853e945c917b1baa05973e", destination.ID) @@ -42,7 +42,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_NewDestination() { assert.Equal(t, "test-value", destination.Metadata["test-key"]) destination2, err2 := tc.client.NewDestination( - ctx, testXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, opts..., + ctx, testXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) assert.NoError(t, err2) assert.Equal(t, testXPubID, destination2.XpubID) @@ -65,7 +65,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_NewDestination() { destination, err := tc.client.NewDestination( context.Background(), testXPub, utils.ChainExternal, - utils.ScriptTypePubKeyHash, false, opts..., + utils.ScriptTypePubKeyHash, opts..., ) require.Error(t, err) require.Nil(t, destination) @@ -99,7 +99,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_NewDestinationForLockingScript() { var destination *Destination destination, err = tc.client.NewDestinationForLockingScript( - tc.ctx, testXPubID, lockingScript, false, opts..., + tc.ctx, testXPubID, lockingScript, opts..., ) assert.NoError(t, err) assert.Equal(t, "a64c7aca7110c7cde92245252a58bb18a4317381fc31fc293f6aafa3fcc7019f", destination.ID) @@ -118,7 +118,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_NewDestinationForLockingScript() { opts := append(tc.client.DefaultModelOptions(), WithMetadatas(metadata)) destination, err := tc.client.NewDestinationForLockingScript( - tc.ctx, testXPubID, "", false, + tc.ctx, testXPubID, "", 
opts..., ) require.Error(t, err) @@ -147,7 +147,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_GetDestinations() { // Create a new destination destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) @@ -176,7 +176,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_GetDestinations() { // Create a new destination destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) @@ -212,7 +212,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_GetDestinationByAddress() { // Create a new destination destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) @@ -240,7 +240,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_GetDestinationByAddress() { // Create a new destination destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) @@ -276,7 +276,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_GetDestinationByLockingScript() { // Create a new destination destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) @@ -305,7 +305,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_GetDestinationByLockingScript() { // Create a new destination destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, 
utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) @@ -340,7 +340,7 @@ func (ts *EmbeddedDBTestSuite) TestClient_UpdateDestinationMetadata() { opts := tc.client.DefaultModelOptions() opts = append(opts, WithMetadatas(metadata)) destination, err := tc.client.NewDestination( - tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + tc.ctx, rawKey, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) diff --git a/action_transaction.go b/action_transaction.go index 2186f54e..f6123d7e 100644 --- a/action_transaction.go +++ b/action_transaction.go @@ -46,13 +46,7 @@ func (c *Client) RecordRawTransaction(ctx context.Context, txHex string, ) (*Transaction, error) { ctx = c.GetOrStartTxn(ctx, "record_raw_transaction") - allowUnknown := true - monitor := c.options.chainstate.Monitor() - if monitor != nil { - allowUnknown = monitor.AllowUnknownTransactions() - } - - return saveRawTransaction(ctx, c, allowUnknown, txHex, opts...) + return saveRawTransaction(ctx, c, true, txHex, opts...) 
} // NewTransaction will create a new draft transaction and return it diff --git a/bux_suite_test.go b/bux_suite_test.go index aac0aaf9..543d6baa 100644 --- a/bux_suite_test.go +++ b/bux_suite_test.go @@ -3,12 +3,12 @@ package bux import ( "context" "fmt" + "github.com/rs/zerolog" "sync" "testing" "time" "github.com/BuxOrg/bux/chainstate" - "github.com/BuxOrg/bux/logging" "github.com/BuxOrg/bux/taskmanager" "github.com/BuxOrg/bux/tester" "github.com/DATA-DOG/go-sqlmock" @@ -68,7 +68,7 @@ type EmbeddedDBTestSuite struct { func (ts *EmbeddedDBTestSuite) serveMySQL() { defer ts.wg.Done() - logger := logging.GetDefaultLogger() + logger := zerolog.Nop() for { err := ts.MySQLServer.Start() diff --git a/chainstate/chainstate.go b/chainstate/chainstate.go index 1683cf04..6ba7243d 100644 --- a/chainstate/chainstate.go +++ b/chainstate/chainstate.go @@ -9,11 +9,6 @@ import ( "time" ) -// MonitorBlockHeaders will start up a block headers monitor -func (c *Client) MonitorBlockHeaders(_ context.Context) error { - return nil -} - // Broadcast will attempt to broadcast a transaction using the given providers func (c *Client) Broadcast(ctx context.Context, id, txHex string, timeout time.Duration) (string, error) { // Basic validation diff --git a/chainstate/client.go b/chainstate/client.go index db6f81bb..16a88f34 100644 --- a/chainstate/client.go +++ b/chainstate/client.go @@ -26,7 +26,6 @@ type ( config *syncConfig // Configuration for broadcasting and other chain-state actions debug bool // For extra logs and additional debug information logger *zerolog.Logger // Logger interface - monitor MonitorService // Monitor service newRelicEnabled bool // If NewRelic is enabled (parent application) userAgent string // Custom user agent for outgoing HTTP Requests } @@ -100,11 +99,6 @@ func (c *Client) Close(ctx context.Context) { c.options.config.minercraft = nil } - // Stop the active Monitor (if not already stopped) - if c.options.monitor != nil { - _ = c.options.monitor.Stop(ctx) - 
c.options.monitor = nil - } } } @@ -143,11 +137,6 @@ func (c *Client) Minercraft() minercraft.ClientInterface { return c.options.config.minercraft } -// Monitor will return the Monitor client -func (c *Client) Monitor() MonitorService { - return c.options.monitor -} - // BroadcastClient will return the BroadcastClient client func (c *Client) BroadcastClient() broadcast.Client { return c.options.config.broadcastClient diff --git a/chainstate/client_options.go b/chainstate/client_options.go index b9dd62e0..262d8122 100644 --- a/chainstate/client_options.go +++ b/chainstate/client_options.go @@ -122,25 +122,6 @@ func WithLogger(customLogger *zerolog.Logger) ClientOps { } } -// WithMonitoring will create a new monitorConfig interface with the given options -func WithMonitoring(ctx context.Context, monitorOptions *MonitorOptions) ClientOps { - return func(c *clientOptions) { - if monitorOptions != nil { - // Create the default Monitor for monitoring destinations - c.monitor = NewMonitor(ctx, monitorOptions) - } - } -} - -// WithMonitoringInterface will set the interface to use for monitoring the blockchain -func WithMonitoringInterface(monitor MonitorService) ClientOps { - return func(c *clientOptions) { - if monitor != nil { - c.monitor = monitor - } - } -} - // WithExcludedProviders will set a list of excluded providers func WithExcludedProviders(providers []string) ClientOps { return func(c *clientOptions) { diff --git a/chainstate/definitions.go b/chainstate/definitions.go index 25d180eb..1579b4ef 100644 --- a/chainstate/definitions.go +++ b/chainstate/definitions.go @@ -10,7 +10,6 @@ const ( defaultFalsePositiveRate = 0.01 defaultFeeLastCheckIgnore = 2 * time.Minute defaultMaxNumberOfDestinations = 100000 - defaultMonitorDays = 7 defaultQueryTimeOut = 15 * time.Second whatsOnChainRateLimitWithKey = 20 ) diff --git a/chainstate/errors.go b/chainstate/errors.go index f52b7764..070d9e0a 100644 --- a/chainstate/errors.go +++ b/chainstate/errors.go @@ -22,6 +22,3 @@ var 
ErrMissingBroadcastMiners = errors.New("missing: broadcasting miners") // ErrMissingQueryMiners is when query miners are missing var ErrMissingQueryMiners = errors.New("missing: query miners") - -// ErrMonitorNotAvailable is when the monitor processor is not available -var ErrMonitorNotAvailable = errors.New("monitor processor not available") diff --git a/chainstate/interface.go b/chainstate/interface.go index 555fec4a..d64787d4 100644 --- a/chainstate/interface.go +++ b/chainstate/interface.go @@ -8,8 +8,6 @@ import ( "github.com/BuxOrg/bux/utils" "github.com/bitcoin-sv/go-broadcast-client/broadcast" "github.com/centrifugal/centrifuge-go" - "github.com/libsv/go-bc" - "github.com/rs/zerolog" "github.com/tonicpow/go-minercraft/v2" ) @@ -51,64 +49,11 @@ type ClientInterface interface { HTTPClient() HTTPInterface IsDebug() bool IsNewRelicEnabled() bool - Monitor() MonitorService Network() Network QueryTimeout() time.Duration FeeUnit() *utils.FeeUnit } -// MonitorClient interface -type MonitorClient interface { - AddFilter(regex, item string) (centrifuge.PublishResult, error) - Connect() error - Disconnect() error - SetToken(token string) -} - -// MonitorHandler interface -type MonitorHandler interface { - SocketHandler - RecordBlockHeader(ctx context.Context, bh bc.BlockHeader) error - RecordTransaction(ctx context.Context, txHex string) error - SetMonitor(monitor *Monitor) -} - -// MonitorProcessor struct that defines interface to all filter processors -type MonitorProcessor interface { - Add(regexString, item string) error - Debug(bool) - FilterTransaction(txHex string) (string, error) - FilterTransactionPublishEvent(eData []byte) (string, error) - GetFilters() map[string]*BloomProcessorFilter - GetHash() string - IsDebug() bool - Logger() *zerolog.Logger - Reload(regexString string, items []string) error - SetFilter(regex string, filter []byte) error - SetLogger(logger *zerolog.Logger) - Test(regexString string, item string) bool -} - -// MonitorService for the 
monitoring -type MonitorService interface { - Add(regexpString string, item string) error - Connected() - Disconnected() - GetFalsePositiveRate() float64 - GetLockID() string - GetMaxNumberOfDestinations() int - GetMonitorDays() int - IsConnected() bool - IsDebug() bool - LoadMonitoredDestinations() bool - AllowUnknownTransactions() bool - Logger() *zerolog.Logger - Processor() MonitorProcessor - SaveDestinations() bool - Start(ctx context.Context, handler MonitorHandler, onStop func()) error - Stop(ctx context.Context) error -} - // SocketHandler is composite interface of centrifuge handlers interfaces type SocketHandler interface { OnConnect(*centrifuge.Client, centrifuge.ConnectEvent) diff --git a/chainstate/monitor.go b/chainstate/monitor.go deleted file mode 100644 index cc09f188..00000000 --- a/chainstate/monitor.go +++ /dev/null @@ -1,238 +0,0 @@ -package chainstate - -import ( - "context" - - "github.com/BuxOrg/bux/logging" - "github.com/BuxOrg/bux/utils" - "github.com/rs/zerolog" -) - -// Monitor starts a new monitorConfig to monitor and filter transactions from a source -// -// Internal struct with all options being private -type Monitor struct { - authToken string - buxAgentURL string - chainstateOptions *clientOptions - client MonitorClient - connected bool - debug bool - falsePositiveRate float64 - filterType string - handler MonitorHandler - loadMonitoredDestinations bool - lockID string - logger *zerolog.Logger - maxNumberOfDestinations int - mempoolSyncChannelActive bool - mempoolSyncChannel chan bool - monitorDays int - processor MonitorProcessor - saveTransactionsDestinations bool - onStop func() - allowUnknownTransactions bool -} - -// MonitorOptions options for starting this monitorConfig -type MonitorOptions struct { - AuthToken string `json:"token"` - BuxAgentURL string `json:"bux_agent_url"` - Debug bool `json:"debug"` - FalsePositiveRate float64 `json:"false_positive_rate"` - LoadMonitoredDestinations bool 
`json:"load_monitored_destinations"` - LockID string `json:"lock_id"` - MaxNumberOfDestinations int `json:"max_number_of_destinations"` - MonitorDays int `json:"monitor_days"` - ProcessorType string `json:"processor_type"` - SaveTransactionDestinations bool `json:"save_transaction_destinations"` - AllowUnknownTransactions bool `json:"allow_unknown_transactions"` // whether to allow transactions that do not have an xpub_in_id or xpub_out_id -} - -// checkDefaults will check for missing values and set default values -func (o *MonitorOptions) checkDefaults() { - // Set the default for Monitor Days (days in past) - if o.MonitorDays <= 0 { - o.MonitorDays = defaultMonitorDays - } - - // Set the false positive rate - if o.FalsePositiveRate <= 0 { - o.FalsePositiveRate = defaultFalsePositiveRate - } - - // Set the maximum number of destinations to monitor - if o.MaxNumberOfDestinations <= 0 { - o.MaxNumberOfDestinations = defaultMaxNumberOfDestinations - } - - // Set a unique lock id if it's not provided - if len(o.LockID) == 0 { // todo: lockID should always be set (return an error if not set?) 
- o.LockID, _ = utils.RandomHex(32) - } -} - -// NewMonitor starts a new monitorConfig and loads all addresses that need to be monitored into the bloom filter -func NewMonitor(_ context.Context, options *MonitorOptions) (monitor *Monitor) { - // Check the defaults - options.checkDefaults() - - // Set the default processor type if not recognized - if options.ProcessorType != FilterBloom && options.ProcessorType != FilterRegex { - options.ProcessorType = FilterBloom - } - - // Create a monitor struct - monitor = &Monitor{ - authToken: options.AuthToken, - buxAgentURL: options.BuxAgentURL, - debug: options.Debug, - falsePositiveRate: options.FalsePositiveRate, - filterType: options.ProcessorType, - loadMonitoredDestinations: options.LoadMonitoredDestinations, - lockID: options.LockID, - maxNumberOfDestinations: options.MaxNumberOfDestinations, - monitorDays: options.MonitorDays, - saveTransactionsDestinations: options.SaveTransactionDestinations, - allowUnknownTransactions: options.AllowUnknownTransactions, - } - - // Set logger if not set - if monitor.logger == nil { - monitor.logger = logging.GetDefaultLogger() - } - - // Switch on the filter type - switch monitor.filterType { - case FilterRegex: - monitor.processor = NewRegexProcessor() - default: - monitor.processor = NewBloomProcessor(uint(monitor.maxNumberOfDestinations), monitor.falsePositiveRate) - } - - // Load the settings for debugging and logging - monitor.processor.Debug(options.Debug) - monitor.processor.SetLogger(monitor.logger) - return -} - -// Add a new item to monitor -func (m *Monitor) Add(regexString, item string) error { - if m.processor == nil { - return ErrMonitorNotAvailable - } - // todo signal to bux-agent that a new item was added - if m.client != nil { - if _, err := m.client.AddFilter(regexString, item); err != nil { - return err - } - } else { - m.logger.Error().Msg("client was expected but not found") - } - return m.processor.Add(regexString, item) -} - -// Connected sets the connected 
state to true -func (m *Monitor) Connected() { - m.connected = true -} - -// Disconnected sets the connected state to false -func (m *Monitor) Disconnected() { - m.connected = false -} - -// GetMonitorDays gets the monitorDays option -func (m *Monitor) GetMonitorDays() int { - return m.monitorDays -} - -// GetFalsePositiveRate gets the falsePositiveRate option -func (m *Monitor) GetFalsePositiveRate() float64 { - return m.falsePositiveRate -} - -// GetLockID gets the lock id from the Monitor -func (m *Monitor) GetLockID() string { - return m.lockID -} - -// GetMaxNumberOfDestinations gets the monitorDays option -func (m *Monitor) GetMaxNumberOfDestinations() int { - return m.maxNumberOfDestinations -} - -// IsConnected returns whether we are connected to the socket -func (m *Monitor) IsConnected() bool { - return m.connected -} - -// IsDebug gets whether debugging is on -func (m *Monitor) IsDebug() bool { - return m.debug -} - -// LoadMonitoredDestinations gets where we want to add the monitored destinations from the database into the processor -func (m *Monitor) LoadMonitoredDestinations() bool { - return m.loadMonitoredDestinations -} - -// AllowUnknownTransactions gets whether we allow recording transactions with no relation to our xpubs -func (m *Monitor) AllowUnknownTransactions() bool { - return m.allowUnknownTransactions -} - -// Logger gets the current logger -func (m *Monitor) Logger() *zerolog.Logger { - return m.logger -} - -// Processor gets the monitor processor -func (m *Monitor) Processor() MonitorProcessor { - return m.processor -} - -// SaveDestinations gets whether we should save destinations from transactions that pass monitor filter -func (m *Monitor) SaveDestinations() bool { - return m.saveTransactionsDestinations -} - -// SetChainstateOptions sets the chainstate options on the monitor to allow more syncing capabilities -func (m *Monitor) SetChainstateOptions(options *clientOptions) { - m.chainstateOptions = options -} - -// Start open a 
socket to the service provider and monitorConfig transactions -func (m *Monitor) Start(_ context.Context, handler MonitorHandler, onStop func()) error { - if m.client == nil { - handler.SetMonitor(m) - m.handler = handler - m.logger.Info().Msgf("[MONITOR] Starting, connecting to server: %s", m.buxAgentURL) - m.client = newCentrifugeClient(m.buxAgentURL, handler) - if m.authToken != "" { - m.client.SetToken(m.authToken) - } - } - - m.onStop = onStop - - return m.client.Connect() -} - -// Stop closes the monitoring socket and pauses monitoring -func (m *Monitor) Stop(_ context.Context) error { - m.logger.Info().Msg("[MONITOR] Stopping monitor...") - if m.IsConnected() { // Only close if still connected - if m.mempoolSyncChannelActive { - close(m.mempoolSyncChannel) - m.mempoolSyncChannelActive = false - } - return m.client.Disconnect() - } - - if m.onStop != nil { - m.onStop() - } - - return nil -} diff --git a/chainstate/monitor_client.go b/chainstate/monitor_client.go deleted file mode 100644 index 981fb499..00000000 --- a/chainstate/monitor_client.go +++ /dev/null @@ -1,100 +0,0 @@ -package chainstate - -import ( - "bytes" - "encoding/json" - "time" - - "github.com/centrifugal/centrifuge-go" -) - -// AddFilterMessage defines a new filter to be published from the client -// todo Just rely on the agent for this data type -type AddFilterMessage struct { - Filter string `json:"filter"` - Hash string `json:"hash"` - Regex string `json:"regex"` - Timestamp int64 `json:"timestamp"` -} - -// SetFilterMessage defines a new filter message with a list of filters -type SetFilterMessage struct { - Filter []byte `json:"filter"` - Hash string `json:"hash"` - Regex string `json:"regex"` - Timestamp int64 `json:"timestamp"` -} - -// AgentClient implements MonitorClient with needed agent methods -type AgentClient struct { - *centrifuge.Client - Token string -} - -// Connect establishes connection to agent -func (a *AgentClient) Connect() error { - return a.Client.Connect() -} - -// 
Disconnect closes connection to agent -func (a *AgentClient) Disconnect() error { - return a.Client.Disconnect() -} - -// SetToken set the client token -func (a *AgentClient) SetToken(token string) { - a.Client.SetToken(token) -} - -// AddFilter adds a new filter to the agent -func (a *AgentClient) AddFilter(regex, item string) (centrifuge.PublishResult, error) { - msg := AddFilterMessage{ - Regex: regex, - Filter: item, - Timestamp: time.Now().Unix(), - } - data, err := json.Marshal(msg) - if err != nil { - return centrifuge.PublishResult{}, err - } - return a.Client.Publish("add_filter", data) -} - -// SetFilter (re)sets a filter to the agent -func (a *AgentClient) SetFilter(regex string, bloomFilter *BloomProcessorFilter) (centrifuge.PublishResult, error) { - filter := new(bytes.Buffer) - _, err := bloomFilter.Filter.WriteTo(filter) - if err != nil { - return centrifuge.PublishResult{}, err - } - - msg := SetFilterMessage{ - Regex: regex, - Filter: filter.Bytes(), - Timestamp: time.Now().Unix(), - } - - var data []byte - data, err = json.Marshal(msg) - if err != nil { - return centrifuge.PublishResult{}, err - } - return a.Client.Publish("set_filter", data) -} - -// newCentrifugeClient will create a new Centrifuge using the provided handler and default configurations -func newCentrifugeClient(wsURL string, handler SocketHandler) MonitorClient { - c := centrifuge.NewJsonClient(wsURL, centrifuge.DefaultConfig()) // todo: use our own defaults/custom options - - c.OnConnect(handler) - c.OnDisconnect(handler) - c.OnError(handler) - c.OnMessage(handler) - c.OnServerJoin(handler) - c.OnServerLeave(handler) - c.OnServerPublish(handler) - c.OnServerSubscribe(handler) - c.OnServerUnsubscribe(handler) - - return &AgentClient{Client: c} -} diff --git a/client.go b/client.go index 5e821645..1da30707 100644 --- a/client.go +++ b/client.go @@ -2,7 +2,6 @@ package bux import ( "context" - "fmt" "time" "github.com/BuxOrg/bux/chainstate" @@ -35,7 +34,6 @@ type ( encryptionKey 
string // Encryption key for encrypting sensitive information (IE: paymail xPub) (hex encoded key) httpClient HTTPInterface // HTTP interface to use importBlockHeadersURL string // The URL of the block headers zip file to import old block headers on startup. if block 0 is found in the DB, block headers will mpt be downloaded - itc bool // (Incoming Transactions Check) True will check incoming transactions via Miners (real-world) iuc bool // (Input UTXO Check) True will check input utxos when saving transactions logger *zerolog.Logger // Internal logging models *modelOptions // Configuration options for the loaded models @@ -188,13 +186,6 @@ func NewClient(ctx context.Context, opts ...ClientOps) (ClientInterface, error) return nil, err } - // Load the blockchain monitor - if client.options.chainstate.Monitor() != nil { - if err = client.loadMonitor(ctx); err != nil { - return nil, err - } - } - // Default paymail server config (generic capabilities and domain check disabled) if client.options.paymail.serverConfig.Configuration == nil { if err = client.loadDefaultPaymailConfig(); err != nil { @@ -267,19 +258,6 @@ func (c *Client) Close(ctx context.Context) error { defer txn.StartSegment("close_all").End() } - // If we loaded a Monitor, remove the long-lasting lock-key before closing cachestore - cs := c.Cachestore() - m := c.Chainstate().Monitor() - if m != nil && cs != nil && len(m.GetLockID()) > 0 { - _ = cs.Delete(ctx, fmt.Sprintf(lockKeyMonitorLockID, m.GetLockID())) - } - - // Close Cachestore - if cs != nil { - cs.Close(ctx) - c.options.cacheStore.ClientInterface = nil - } - // Close Chainstate ch := c.Chainstate() if ch != nil { @@ -395,16 +373,6 @@ func (c *Client) IsNewRelicEnabled() bool { return c.options.newRelic.enabled } -// IsMempoolMonitoringEnabled will return whether mempool monitoring is on -func (c *Client) IsMempoolMonitoringEnabled() bool { - return c.options.chainstate.IsNewRelicEnabled() -} - -// IsITCEnabled will return the flag (bool) -func 
(c *Client) IsITCEnabled() bool { - return c.options.itc -} - // IsIUCEnabled will return the flag (bool) func (c *Client) IsIUCEnabled() bool { return c.options.iuc diff --git a/client_internal.go b/client_internal.go index 53eb2ca3..e7ed26ce 100644 --- a/client_internal.go +++ b/client_internal.go @@ -2,8 +2,6 @@ package bux import ( "context" - "time" - "github.com/BuxOrg/bux/chainstate" "github.com/BuxOrg/bux/cluster" "github.com/BuxOrg/bux/notifications" @@ -122,61 +120,6 @@ func (c *Client) loadTaskmanager(ctx context.Context) (err error) { return } -// loadMonitor will load the default Monitor -// -// Cachestore is required to be loaded before this method is called -func (c *Client) loadMonitor(ctx context.Context) (err error) { - // Check if the monitor was set by the user - monitor := c.options.chainstate.Monitor() - if monitor == nil { - return // No monitor, exit! - } - - // Create a handler and load destinations if option has been set - handler := NewMonitorHandler(ctx, c, monitor) - - // Start the default monitor - if err = startDefaultMonitor(ctx, c, monitor); err != nil { - return err - } - - lockKey := c.options.cluster.GetClusterPrefix() + lockKeyMonitorLockID - lockID := monitor.GetLockID() - go func() { - var currentLock string - for { - if currentLock, err = c.Cachestore().WriteLockWithSecret(ctx, lockKey, lockID, defaultMonitorLockTTL); err != nil { - // do nothing really, we just didn't get the lock - if monitor.IsDebug() { - monitor.Logger().Info().Msgf("[MONITOR] failed getting lock for monitor: %s: %e", lockID, err) - } - } - - if lockID == currentLock { - // Start the monitor, if not connected - if !monitor.IsConnected() { - if err = monitor.Start(ctx, &handler, func() { - _, err = c.Cachestore().ReleaseLock(ctx, lockKeyMonitorLockID, lockID) - }); err != nil { - monitor.Logger().Error().Msgf("[MONITOR] failed starting monitor: %e", err) - } - } - } else { - // first close any monitor if running - if monitor.IsConnected() { - if err = 
monitor.Stop(ctx); err != nil { - monitor.Logger().Error().Msgf("[MONITOR] failed stopping monitor: %e", err) - } - } - } - - time.Sleep(defaultMonitorSleep) - } - }() - - return nil -} - // runModelMigrations will run the model Migrate() method for all models func (c *Client) runModelMigrations(models ...interface{}) (err error) { // If the migrations are disabled, just return diff --git a/client_options.go b/client_options.go index 7a439a9f..615b2cd8 100644 --- a/client_options.go +++ b/client_options.go @@ -41,9 +41,6 @@ func defaultClientOptions() *clientOptions { datastoreLogger := logging.CreateGormLoggerAdapter(&dWarnLogger, "datastore") // Set the default options return &clientOptions{ - // Incoming Transaction Checker (lookup external tx via miner for validity) - itc: true, - // By default check input utxos (unless disabled by the user) iuc: true, @@ -239,13 +236,6 @@ func WithModels(models ...interface{}) ClientOps { } } -// WithITCDisabled will disable (ITC) incoming transaction checking -func WithITCDisabled() ClientOps { - return func(c *clientOptions) { - c.itc = false - } -} - // WithIUCDisabled will disable checking the input utxos func WithIUCDisabled() ClientOps { return func(c *clientOptions) { diff --git a/client_options_test.go b/client_options_test.go index d31e3133..fc359e7e 100644 --- a/client_options_test.go +++ b/client_options_test.go @@ -25,6 +25,7 @@ import ( // TestNewRelicOptions will test the method enable() func Test_newRelicOptions_enable(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("enable with valid app", func(t *testing.T) { app, err := tester.GetNewRelicApp(defaultNewRelicApp) @@ -33,6 +34,7 @@ func Test_newRelicOptions_enable(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithNewRelic(app)) + opts = append(opts, WithLogger(&testLogger)) var tc ClientInterface tc, err = NewClient( @@ -50,6 +52,7 @@ func Test_newRelicOptions_enable(t *testing.T) { t.Run("enable with invalid app", 
func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithNewRelic(nil)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -64,15 +67,15 @@ func Test_newRelicOptions_enable(t *testing.T) { // Test_newRelicOptions_getOrStartTxn will test the method getOrStartTxn() func Test_newRelicOptions_getOrStartTxn(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("Get a valid ctx and txn", func(t *testing.T) { app, err := tester.GetNewRelicApp(defaultNewRelicApp) require.NoError(t, err) require.NotNil(t, app) - logger := zerolog.Nop() opts := DefaultClientOpts(false, true) - opts = append(opts, WithNewRelic(app), WithLogger(&logger)) + opts = append(opts, WithNewRelic(app), WithLogger(&testLogger)) var tc ClientInterface tc, err = NewClient( @@ -91,9 +94,8 @@ func Test_newRelicOptions_getOrStartTxn(t *testing.T) { }) t.Run("invalid ctx and txn", func(t *testing.T) { - logger := zerolog.Nop() opts := DefaultClientOpts(false, true) - opts = append(opts, WithNewRelic(nil), WithLogger(&logger)) + opts = append(opts, WithNewRelic(nil), WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) @@ -134,8 +136,6 @@ func TestClient_defaultModelOptions(t *testing.T) { require.NotNil(t, dco.taskManager) - assert.Equal(t, true, dco.itc) - assert.Nil(t, dco.logger) }) } @@ -143,6 +143,7 @@ func TestClient_defaultModelOptions(t *testing.T) { // TestWithUserAgent will test the method WithUserAgent() func TestWithUserAgent(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithUserAgent("") @@ -150,9 +151,8 @@ func TestWithUserAgent(t *testing.T) { }) t.Run("empty user agent", func(t *testing.T) { - logger := zerolog.Nop() opts := DefaultClientOpts(false, true) - opts = append(opts, WithUserAgent(""), WithLogger(&logger)) + opts = append(opts, WithUserAgent(""), WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -166,9 +166,8 @@ func TestWithUserAgent(t *testing.T) { t.Run("custom user agent", func(t *testing.T) { customAgent := "custom-user-agent" - logger := zerolog.Nop() opts := DefaultClientOpts(false, true) - opts = append(opts, WithUserAgent(customAgent), WithLogger(&logger)) + opts = append(opts, WithUserAgent(customAgent), WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -217,6 +216,7 @@ func TestWithDebugging(t *testing.T) { // TestWithEncryption will test the method WithEncryption() func TestWithEncryption(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithEncryption("") @@ -226,6 +226,7 @@ func TestWithEncryption(t *testing.T) { t.Run("empty key", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithEncryption("")) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) @@ -239,6 +240,7 @@ func TestWithEncryption(t *testing.T) { key, _ := utils.RandomHex(32) opts := DefaultClientOpts(false, true) opts = append(opts, WithEncryption(key)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -251,6 +253,8 @@ func TestWithEncryption(t *testing.T) { // TestWithRedis will test the method WithRedis() func TestWithRedis(t *testing.T) { + testLogger := zerolog.Nop() + t.Run("check type", func(t *testing.T) { opt := WithRedis(nil) assert.IsType(t, *new(ClientOps), opt) @@ -269,6 +273,7 @@ func TestWithRedis(t *testing.T) { }), WithSQLite(tester.SQLiteTestConfig(false, true)), WithMinercraft(&chainstate.MinerCraftBase{}), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -292,6 +297,7 @@ func TestWithRedis(t *testing.T) { }), WithSQLite(tester.SQLiteTestConfig(false, true)), WithMinercraft(&chainstate.MinerCraftBase{}), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -305,6 +311,8 @@ func TestWithRedis(t *testing.T) { // TestWithRedisConnection will test the method WithRedisConnection() func TestWithRedisConnection(t *testing.T) { + testLogger := zerolog.Nop() + t.Run("check type", func(t *testing.T) { opt := WithRedisConnection(nil) assert.IsType(t, *new(ClientOps), opt) @@ -317,6 +325,7 @@ func TestWithRedisConnection(t *testing.T) { WithRedisConnection(nil), WithSQLite(tester.SQLiteTestConfig(false, true)), WithMinercraft(&chainstate.MinerCraftBase{}), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -338,6 +347,7 @@ func TestWithRedisConnection(t *testing.T) { WithRedisConnection(client), WithSQLite(tester.SQLiteTestConfig(false, true)), WithMinercraft(&chainstate.MinerCraftBase{}), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -359,12 +369,14 @@ func TestWithFreeCache(t *testing.T) 
{ }) t.Run("using FreeCache", func(t *testing.T) { + testLogger := zerolog.Nop() tc, err := NewClient( tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), WithFreeCache(), WithTaskqConfig(taskmanager.DefaultTaskQConfig(testQueueName)), WithSQLite(&datastore.SQLiteConfig{Shared: true}), - WithMinercraft(&chainstate.MinerCraftBase{})) + WithMinercraft(&chainstate.MinerCraftBase{}), + WithLogger(&testLogger)) require.NoError(t, err) require.NotNil(t, tc) defer CloseClient(context.Background(), t, tc) @@ -378,6 +390,7 @@ func TestWithFreeCache(t *testing.T) { // TestWithFreeCacheConnection will test the method WithFreeCacheConnection() func TestWithFreeCacheConnection(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithFreeCacheConnection(nil) @@ -385,15 +398,13 @@ func TestWithFreeCacheConnection(t *testing.T) { }) t.Run("using a nil client", func(t *testing.T) { - logger := zerolog.Nop() - tc, err := NewClient( tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), WithFreeCacheConnection(nil), WithTaskqConfig(taskmanager.DefaultTaskQConfig(testQueueName)), WithSQLite(&datastore.SQLiteConfig{Shared: true}), WithMinercraft(&chainstate.MinerCraftBase{}), - WithLogger(&logger), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -407,14 +418,13 @@ func TestWithFreeCacheConnection(t *testing.T) { t.Run("using an existing connection", func(t *testing.T) { fc := freecache.NewCache(cachestore.DefaultCacheSize) - logger := zerolog.Nop() tc, err := NewClient( tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), WithFreeCacheConnection(fc), WithTaskqConfig(taskmanager.DefaultTaskQConfig(testQueueName)), WithSQLite(&datastore.SQLiteConfig{Shared: true}), WithMinercraft(&chainstate.MinerCraftBase{}), - WithLogger(&logger), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -429,10 +439,12 @@ func TestWithFreeCacheConnection(t 
*testing.T) { // TestWithPaymailClient will test the method WithPaymailClient() func TestWithPaymailClient(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("using a nil driver, automatically makes paymail client", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithPaymailClient(nil)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -449,6 +461,7 @@ func TestWithPaymailClient(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithPaymailClient(p)) + opts = append(opts, WithLogger(&testLogger)) var tc ClientInterface tc, err = NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) @@ -464,13 +477,13 @@ func TestWithPaymailClient(t *testing.T) { // TestWithTaskQ will test the method WithTaskQ() func TestWithTaskQ(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() // todo: test cases where config is nil, or cannot load TaskQ t.Run("using taskq using memory", func(t *testing.T) { - logger := zerolog.Nop() tcOpts := DefaultClientOpts(true, true) - tcOpts = append(tcOpts, WithLogger(&logger)) + tcOpts = append(tcOpts, WithLogger(&testLogger)) tc, err := NewClient( tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), @@ -490,8 +503,6 @@ func TestWithTaskQ(t *testing.T) { t.Skip("skipping live local redis tests") } - logger := zerolog.Nop() - tc, err := NewClient( tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), WithTaskqConfig( @@ -502,7 +513,7 @@ func TestWithTaskQ(t *testing.T) { }), WithSQLite(tester.SQLiteTestConfig(false, true)), WithMinercraft(&chainstate.MinerCraftBase{}), - WithLogger(&logger), + WithLogger(&testLogger), ) require.NoError(t, err) require.NotNil(t, tc) @@ -553,6 +564,7 @@ func TestWithLogger(t *testing.T) { // TestWithModels will test the method WithModels() func TestWithModels(t *testing.T) { 
t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithModels() @@ -562,6 +574,7 @@ func TestWithModels(t *testing.T) { t.Run("empty models - returns default models", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithModels()) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -570,7 +583,7 @@ func TestWithModels(t *testing.T) { assert.Equal(t, []string{ ModelXPub.String(), ModelAccessKey.String(), - ModelDraftTransaction.String(), ModelIncomingTransaction.String(), + ModelDraftTransaction.String(), ModelTransaction.String(), ModelBlockHeader.String(), ModelSyncTransaction.String(), ModelDestination.String(), ModelUtxo.String(), @@ -580,6 +593,7 @@ func TestWithModels(t *testing.T) { t.Run("add custom models", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithModels(newPaymail(testPaymail))) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) @@ -588,7 +602,7 @@ func TestWithModels(t *testing.T) { assert.Equal(t, []string{ ModelXPub.String(), ModelAccessKey.String(), - ModelDraftTransaction.String(), ModelIncomingTransaction.String(), + ModelDraftTransaction.String(), ModelTransaction.String(), ModelBlockHeader.String(), ModelSyncTransaction.String(), ModelDestination.String(), ModelUtxo.String(), ModelPaymailAddress.String(), @@ -596,42 +610,10 @@ func TestWithModels(t *testing.T) { }) } -// TestWithITCDisabled will test the method WithITCDisabled() -func TestWithITCDisabled(t *testing.T) { - t.Parallel() - - t.Run("check type", func(t *testing.T) { - opt := WithITCDisabled() - assert.IsType(t, *new(ClientOps), opt) - }) - - t.Run("default options", func(t *testing.T) { - opts := DefaultClientOpts(false, true) - - tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) - require.NoError(t, err) - require.NotNil(t, tc) - defer CloseClient(context.Background(), t, tc) - - assert.Equal(t, true, tc.IsITCEnabled()) - }) - - t.Run("itc disabled", func(t *testing.T) { - opts := DefaultClientOpts(false, true) - opts = append(opts, WithITCDisabled()) - - tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) - require.NoError(t, err) - require.NotNil(t, tc) - defer CloseClient(context.Background(), t, tc) - - assert.Equal(t, false, tc.IsITCEnabled()) - }) -} - // TestWithIUCDisabled will test the method WithIUCDisabled() func TestWithIUCDisabled(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithIUCDisabled() @@ -640,6 +622,7 @@ func TestWithIUCDisabled(t *testing.T) { t.Run("default options", func(t *testing.T) { opts := DefaultClientOpts(false, true) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) @@ -649,9 +632,10 @@ func TestWithIUCDisabled(t *testing.T) { assert.Equal(t, true, tc.IsIUCEnabled()) }) - t.Run("itc disabled", func(t *testing.T) { + t.Run("iuc disabled", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithIUCDisabled()) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -665,6 +649,7 @@ func TestWithIUCDisabled(t *testing.T) { // TestWithImportBlockHeaders will test the method WithImportBlockHeaders() func TestWithImportBlockHeaders(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithImportBlockHeaders("") @@ -674,6 +659,7 @@ func TestWithImportBlockHeaders(t *testing.T) { t.Run("empty url", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithImportBlockHeaders("")) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -688,6 +674,7 @@ func TestWithImportBlockHeaders(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithImportBlockHeaders(customURL)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) @@ -701,6 +688,7 @@ func TestWithImportBlockHeaders(t *testing.T) { // TestWithHTTPClient will test the method WithHTTPClient() func TestWithHTTPClient(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithHTTPClient(nil) @@ -710,6 +698,7 @@ func TestWithHTTPClient(t *testing.T) { t.Run("test applying nil", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithHTTPClient(nil)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -723,6 +712,7 @@ func TestWithHTTPClient(t *testing.T) { customClient := &http.Client{} opts := DefaultClientOpts(false, true) opts = append(opts, WithHTTPClient(customClient)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -736,6 +726,7 @@ func TestWithHTTPClient(t *testing.T) { // TestWithCustomCachestore will test the method WithCustomCachestore() func TestWithCustomCachestore(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithCustomCachestore(nil) @@ -745,6 +736,7 @@ func TestWithCustomCachestore(t *testing.T) { t.Run("test applying nil", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithCustomCachestore(nil)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -760,6 +752,7 @@ func TestWithCustomCachestore(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithCustomCachestore(customCache)) + opts = append(opts, WithLogger(&testLogger)) var tc ClientInterface tc, err = NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
@@ -774,6 +767,7 @@ func TestWithCustomCachestore(t *testing.T) { // TestWithCustomDatastore will test the method WithCustomDatastore() func TestWithCustomDatastore(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithCustomDatastore(nil) @@ -783,6 +777,7 @@ func TestWithCustomDatastore(t *testing.T) { t.Run("test applying nil", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithCustomDatastore(nil)) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -798,6 +793,7 @@ func TestWithCustomDatastore(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithCustomDatastore(customData)) + opts = append(opts, WithLogger(&testLogger)) var tc ClientInterface tc, err = NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) @@ -817,6 +813,7 @@ func TestWithCustomDatastore(t *testing.T) { // TestWithAutoMigrate will test the method WithAutoMigrate() func TestWithAutoMigrate(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithAutoMigrate() @@ -826,6 +823,7 @@ func TestWithAutoMigrate(t *testing.T) { t.Run("no additional models, just base models", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithAutoMigrate()) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) @@ -836,7 +834,6 @@ func TestWithAutoMigrate(t *testing.T) { ModelXPub.String(), ModelAccessKey.String(), ModelDraftTransaction.String(), - ModelIncomingTransaction.String(), ModelTransaction.String(), ModelBlockHeader.String(), ModelSyncTransaction.String(), @@ -848,6 +845,7 @@ func TestWithAutoMigrate(t *testing.T) { t.Run("one additional model", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithAutoMigrate(newPaymail(testPaymail))) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -858,7 +856,6 @@ func TestWithAutoMigrate(t *testing.T) { ModelXPub.String(), ModelAccessKey.String(), ModelDraftTransaction.String(), - ModelIncomingTransaction.String(), ModelTransaction.String(), ModelBlockHeader.String(), ModelSyncTransaction.String(), @@ -872,6 +869,7 @@ func TestWithAutoMigrate(t *testing.T) { // TestWithMigrationDisabled will test the method WithMigrationDisabled() func TestWithMigrationDisabled(t *testing.T) { t.Parallel() + testLogger := zerolog.Nop() t.Run("check type", func(t *testing.T) { opt := WithMigrationDisabled() @@ -880,6 +878,7 @@ func TestWithMigrationDisabled(t *testing.T) { t.Run("default options", func(t *testing.T) { opts := DefaultClientOpts(false, true) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) require.NoError(t, err) @@ -892,6 +891,7 @@ func TestWithMigrationDisabled(t *testing.T) { t.Run("migration disabled", func(t *testing.T) { opts := DefaultClientOpts(false, true) opts = append(opts, WithMigrationDisabled()) + opts = append(opts, WithLogger(&testLogger)) tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
require.NoError(t, err) diff --git a/cron_job_declarations.go b/cron_job_declarations.go index 0f1abe30..68a05957 100644 --- a/cron_job_declarations.go +++ b/cron_job_declarations.go @@ -10,7 +10,6 @@ import ( // Cron job names to be used in WithCronCustomPeriod const ( CronJobNameDraftTransactionCleanUp = "draft_transaction_clean_up" - CronJobNameIncomingTransaction = "incoming_transaction_process" CronJobNameSyncTransactionBroadcast = "sync_transaction_broadcast" CronJobNameSyncTransactionSync = "sync_transaction_sync" ) @@ -31,10 +30,6 @@ func (c *Client) cronJobs() taskmanager.CronJobs { Period: 60 * time.Second, Handler: handler(taskCleanupDraftTransactions), }, - CronJobNameIncomingTransaction: { - Period: 30 * time.Second, - Handler: handler(taskProcessIncomingTransactions), - }, CronJobNameSyncTransactionBroadcast: { Period: 30 * time.Second, Handler: handler(taskBroadcastTransactions), diff --git a/cron_job_definitions.go b/cron_job_definitions.go index f900b102..6ccf3d95 100644 --- a/cron_job_definitions.go +++ b/cron_job_definitions.go @@ -53,17 +53,6 @@ func taskCleanupDraftTransactions(ctx context.Context, client *Client) error { return nil } -// taskProcessIncomingTransactions will process any incoming transactions found -func taskProcessIncomingTransactions(ctx context.Context, client *Client) error { - client.Logger().Info().Msg("running process incoming transaction(s) task...") - - err := processIncomingTransactions(ctx, client.Logger(), 10, WithClient(client)) - if err == nil || errors.Is(err, datastore.ErrNoResults) { - return nil - } - return err -} - // taskBroadcastTransactions will broadcast any transactions func taskBroadcastTransactions(ctx context.Context, client *Client) error { client.Logger().Info().Msg("running broadcast transaction(s) task...") diff --git a/definitions.go b/definitions.go index bcc12d75..4e572112 100644 --- a/definitions.go +++ b/definitions.go @@ -6,16 +6,14 @@ import ( // Defaults for engine functionality const ( - 
changeOutputSize = uint64(35) // Average size in bytes of a change output - databaseLongReadTimeout = 30 * time.Second // For all "GET" or "SELECT" methods - defaultBroadcastTimeout = 25 * time.Second // Default timeout for broadcasting - defaultCacheLockTTL = 20 // in Seconds - defaultCacheLockTTW = 10 // in Seconds - defaultDatabaseReadTimeout = 20 * time.Second // For all "GET" or "SELECT" methods - defaultDraftTxExpiresIn = 20 * time.Second // Default TTL for draft transactions - defaultHTTPTimeout = 20 * time.Second // Default timeout for HTTP requests - defaultMonitorSleep = 2 * time.Second - defaultMonitorLockTTL = 10 // in seconds - should be larger than defaultMonitorSleep + changeOutputSize = uint64(35) // Average size in bytes of a change output + databaseLongReadTimeout = 30 * time.Second // For all "GET" or "SELECT" methods + defaultBroadcastTimeout = 25 * time.Second // Default timeout for broadcasting + defaultCacheLockTTL = 20 // in Seconds + defaultCacheLockTTW = 10 // in Seconds + defaultDatabaseReadTimeout = 20 * time.Second // For all "GET" or "SELECT" methods + defaultDraftTxExpiresIn = 20 * time.Second // Default TTL for draft transactions + defaultHTTPTimeout = 20 * time.Second // Default timeout for HTTP requests defaultOverheadSize = uint64(8) // 8 bytes is the default overhead in a transaction = 4 bytes version + 4 bytes nLockTime defaultQueryTxTimeout = 10 * time.Second // Default timeout for syncing on-chain information defaultSleepForNewBlockHeaders = 30 * time.Second // Default wait before checking for a new unprocessed block @@ -28,18 +26,17 @@ const ( // All the base models const ( - ModelAccessKey ModelName = "access_key" - ModelBlockHeader ModelName = "block_header" - ModelDestination ModelName = "destination" - ModelDraftTransaction ModelName = "draft_transaction" - ModelIncomingTransaction ModelName = "incoming_transaction" - ModelMetadata ModelName = "metadata" - ModelNameEmpty ModelName = "empty" - ModelPaymailAddress ModelName 
= "paymail_address" - ModelSyncTransaction ModelName = "sync_transaction" - ModelTransaction ModelName = "transaction" - ModelUtxo ModelName = "utxo" - ModelXPub ModelName = "xpub" + ModelAccessKey ModelName = "access_key" + ModelBlockHeader ModelName = "block_header" + ModelDestination ModelName = "destination" + ModelDraftTransaction ModelName = "draft_transaction" + ModelMetadata ModelName = "metadata" + ModelNameEmpty ModelName = "empty" + ModelPaymailAddress ModelName = "paymail_address" + ModelSyncTransaction ModelName = "sync_transaction" + ModelTransaction ModelName = "transaction" + ModelUtxo ModelName = "utxo" + ModelXPub ModelName = "xpub" ) // AllModelNames is a list of all models @@ -47,7 +44,6 @@ var AllModelNames = []ModelName{ ModelAccessKey, ModelBlockHeader, ModelDestination, - ModelIncomingTransaction, ModelMetadata, ModelPaymailAddress, ModelPaymailAddress, @@ -59,16 +55,15 @@ var AllModelNames = []ModelName{ // Internal table names const ( - tableAccessKeys = "access_keys" - tableBlockHeaders = "block_headers" - tableDestinations = "destinations" - tableDraftTransactions = "draft_transactions" - tableIncomingTransactions = "incoming_transactions" - tablePaymailAddresses = "paymail_addresses" - tableSyncTransactions = "sync_transactions" - tableTransactions = "transactions" - tableUTXOs = "utxos" - tableXPubs = "xpubs" + tableAccessKeys = "access_keys" + tableBlockHeaders = "block_headers" + tableDestinations = "destinations" + tableDraftTransactions = "draft_transactions" + tablePaymailAddresses = "paymail_addresses" + tableSyncTransactions = "sync_transactions" + tableTransactions = "transactions" + tableUTXOs = "utxos" + tableXPubs = "xpubs" ) const ( @@ -153,11 +148,6 @@ var BaseModels = []interface{}{ Model: *NewBaseModel(ModelDraftTransaction), }, - // Incoming transactions (external & unknown) (related to Transaction & Draft) - &IncomingTransaction{ - Model: *NewBaseModel(ModelIncomingTransaction), - }, - // Finalized transactions 
(related to Draft) &Transaction{ Model: *NewBaseModel(ModelTransaction), diff --git a/examples/client/custom_cron/custom_cron.go b/examples/client/custom_cron/custom_cron.go index 535053bc..fed09089 100644 --- a/examples/client/custom_cron/custom_cron.go +++ b/examples/client/custom_cron/custom_cron.go @@ -11,8 +11,8 @@ import ( func main() { client, err := bux.NewClient( context.Background(), // Set context - bux.WithCronCustomPeriod(bux.CronJobNameDraftTransactionCleanUp, 2*time.Second), - bux.WithCronCustomPeriod(bux.CronJobNameIncomingTransaction, 4*time.Second), + bux.WithCronCustomPeriod(bux.CronJobNameDraftTransactionCleanUp, 2*time.Second), + bux.WithCronCustomPeriod(bux.CronJobNameSyncTransactionSync, 4*time.Second), ) if err != nil { log.Fatalln("error: " + err.Error()) diff --git a/interface.go b/interface.go index cd9f6bfa..19caeed5 100644 --- a/interface.go +++ b/interface.go @@ -82,9 +82,9 @@ type DestinationService interface { queryParams *datastore.QueryParams) ([]*Destination, error) GetDestinationsByXpubIDCount(ctx context.Context, xPubID string, usingMetadata *Metadata, conditions *map[string]interface{}) (int64, error) - NewDestination(ctx context.Context, xPubKey string, chain uint32, destinationType string, monitor bool, + NewDestination(ctx context.Context, xPubKey string, chain uint32, destinationType string, opts ...ModelOps) (*Destination, error) - NewDestinationForLockingScript(ctx context.Context, xPubID, lockingScript string, monitor bool, + NewDestinationForLockingScript(ctx context.Context, xPubID, lockingScript string, opts ...ModelOps) (*Destination, error) UpdateDestinationMetadataByID(ctx context.Context, xPubID, id string, metadata Metadata) (*Destination, error) UpdateDestinationMetadataByLockingScript(ctx context.Context, xPubID, @@ -194,7 +194,6 @@ type ClientInterface interface { ImportBlockHeadersFromURL() string IsDebug() bool IsEncryptionKeySet() bool - IsITCEnabled() bool IsIUCEnabled() bool IsMigrationEnabled() bool 
IsNewRelicEnabled() bool diff --git a/locks.go b/locks.go index 81c434b1..783d4d05 100644 --- a/locks.go +++ b/locks.go @@ -7,9 +7,7 @@ import ( ) const ( - lockKeyMonitorLockID = "monitor-lock-id-%s" // + Lock ID lockKeyProcessBroadcastTx = "process-broadcast-transaction-%s" // + Tx ID - lockKeyProcessIncomingTx = "process-incoming-transaction-%s" // + Tx ID lockKeyProcessP2PTx = "process-p2p-transaction-%s" // + Tx ID lockKeyProcessSyncTx = "process-sync-transaction-task" lockKeyProcessXpub = "action-xpub-id-%s" // + Xpub ID diff --git a/mock_chainstate_test.go b/mock_chainstate_test.go index 38d4b16c..3180cfc1 100644 --- a/mock_chainstate_test.go +++ b/mock_chainstate_test.go @@ -102,10 +102,6 @@ type chainStateEverythingOnChain struct { chainStateEverythingInMempool } -func (c *chainStateEverythingOnChain) Monitor() chainstate.MonitorService { - return nil -} - func (c *chainStateEverythingOnChain) BroadcastClient() broadcast.Client { return nil } diff --git a/model_destinations.go b/model_destinations.go index 22c7cc64..69e13b16 100644 --- a/model_destinations.go +++ b/model_destinations.go @@ -10,7 +10,6 @@ import ( "github.com/BuxOrg/bux/utils" "github.com/bitcoinschema/go-bitcoin/v2" "github.com/mrz1836/go-datastore" - customTypes "github.com/mrz1836/go-datastore/custom_types" ) // Destination is an object representing a BitCoin destination (address, script, etc) @@ -21,15 +20,14 @@ type Destination struct { Model `bson:",inline"` // Model specific fields - ID string `json:"id" toml:"id" yaml:"id" gorm:"<-:create;type:char(64);primaryKey;comment:This is the hash of the locking script" bson:"_id"` - XpubID string `json:"xpub_id" toml:"xpub_id" yaml:"xpub_id" gorm:"<-:create;type:char(64);index;comment:This is the related xPub" bson:"xpub_id"` - LockingScript string `json:"locking_script" toml:"locking_script" yaml:"locking_script" gorm:"<-:create;type:text;comment:This is Bitcoin output script in hex" bson:"locking_script"` - Type string `json:"type" 
toml:"type" yaml:"type" gorm:"<-:create;type:text;comment:Type of output" bson:"type"` - Chain uint32 `json:"chain" toml:"chain" yaml:"chain" gorm:"<-:create;type:int;comment:This is the (chain)/num location of the address related to the xPub" bson:"chain"` - Num uint32 `json:"num" toml:"num" yaml:"num" gorm:"<-:create;type:int;comment:This is the chain/(num) location of the address related to the xPub" bson:"num"` - Address string `json:"address" toml:"address" yaml:"address" gorm:"<-:create;type:varchar(35);index;comment:This is the BitCoin address" bson:"address"` - DraftID string `json:"draft_id" toml:"draft_id" yaml:"draft_id" gorm:"<-:create;type:varchar(64);index;comment:This is the related draft id (if internal tx)" bson:"draft_id,omitempty"` - Monitor customTypes.NullTime `json:"monitor" toml:"monitor" yaml:"monitor" gorm:";index;comment:When this address was last used for an external transaction, for monitoring" bson:"monitor,omitempty"` + ID string `json:"id" toml:"id" yaml:"id" gorm:"<-:create;type:char(64);primaryKey;comment:This is the hash of the locking script" bson:"_id"` + XpubID string `json:"xpub_id" toml:"xpub_id" yaml:"xpub_id" gorm:"<-:create;type:char(64);index;comment:This is the related xPub" bson:"xpub_id"` + LockingScript string `json:"locking_script" toml:"locking_script" yaml:"locking_script" gorm:"<-:create;type:text;comment:This is Bitcoin output script in hex" bson:"locking_script"` + Type string `json:"type" toml:"type" yaml:"type" gorm:"<-:create;type:text;comment:Type of output" bson:"type"` + Chain uint32 `json:"chain" toml:"chain" yaml:"chain" gorm:"<-:create;type:int;comment:This is the (chain)/num location of the address related to the xPub" bson:"chain"` + Num uint32 `json:"num" toml:"num" yaml:"num" gorm:"<-:create;type:int;comment:This is the chain/(num) location of the address related to the xPub" bson:"num"` + Address string `json:"address" toml:"address" yaml:"address" gorm:"<-:create;type:varchar(35);index;comment:This 
is the BitCoin address" bson:"address"` + DraftID string `json:"draft_id" toml:"draft_id" yaml:"draft_id" gorm:"<-:create;type:varchar(64);index;comment:This is the related draft id (if internal tx)" bson:"draft_id,omitempty"` } // newDestination will start a new Destination model for a locking script diff --git a/model_destinations_test.go b/model_destinations_test.go index baa1fe84..c9d18b2e 100644 --- a/model_destinations_test.go +++ b/model_destinations_test.go @@ -33,7 +33,6 @@ func TestDestination_newDestination(t *testing.T) { assert.Equal(t, ModelDestination.String(), destination.GetModelName()) assert.Equal(t, true, destination.IsNew()) assert.Equal(t, "", destination.LockingScript) - assert.Equal(t, false, destination.Monitor.Valid) assert.Equal(t, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", destination.GetID()) }) @@ -46,7 +45,6 @@ func TestDestination_newDestination(t *testing.T) { assert.Equal(t, ModelDestination.String(), destination.GetModelName()) assert.Equal(t, true, destination.IsNew()) assert.Equal(t, testScript, destination.LockingScript) - assert.Equal(t, false, destination.Monitor.Valid) assert.Equal(t, xPubID, destination.XpubID) assert.Equal(t, bscript2.ScriptTypeNonStandard, destination.Type) assert.Equal(t, testDestinationID, destination.GetID()) @@ -284,7 +282,7 @@ func TestClient_NewDestination(t *testing.T) { // Create a new destination destination, err := client.NewDestination( - ctx, rawXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, opts..., + ctx, rawXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.NoError(t, err) require.NotNil(t, destination) @@ -312,7 +310,7 @@ func TestClient_NewDestination(t *testing.T) { // Create a new destination destination, err := client.NewDestination( - ctx, "bad-value", utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + ctx, "bad-value", utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.Error(t, err) @@ -332,7 
+330,7 @@ func TestClient_NewDestination(t *testing.T) { // Create a new destination destination, err := client.NewDestination( - ctx, testXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + ctx, testXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, opts..., ) require.Error(t, err) @@ -357,7 +355,7 @@ func TestClient_NewDestination(t *testing.T) { // Create a new destination destination, err := client.NewDestination( - ctx, rawXPub, utils.ChainExternal, utils.ScriptTypeMultiSig, false, + ctx, rawXPub, utils.ChainExternal, utils.ScriptTypeMultiSig, opts..., ) require.Error(t, err) @@ -381,7 +379,7 @@ func TestClient_NewDestination(t *testing.T) { // Create a new destination destination, err := client.NewDestinationForLockingScript( - ctx, utils.Hash(rawXPub), stasHex, false, + ctx, utils.Hash(rawXPub), stasHex, opts..., ) require.NoError(t, err) @@ -412,7 +410,7 @@ func (ts *EmbeddedDBTestSuite) TestDestination_Save() { // Create model tc.MockSQLDB.ExpectExec("INSERT INTO `"+tc.tablePrefix+"_destinations` ("+ "`created_at`,`updated_at`,`metadata`,`deleted_at`,`id`,`xpub_id`,`locking_script`,"+ - "`type`,`chain`,`num`,`address`,`draft_id`,`monitor`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)").WithArgs( + "`type`,`chain`,`num`,`address`,`draft_id`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)").WithArgs( tester.AnyTime{}, // created_at tester.AnyTime{}, // updated_at nil, // metadata @@ -425,7 +423,6 @@ func (ts *EmbeddedDBTestSuite) TestDestination_Save() { 0, // num destination.Address, // address testDraftID, // draft_id - nil, // monitor ).WillReturnResult(sqlmock.NewResult(1, 1)) // Commit the TX @@ -459,7 +456,7 @@ func (ts *EmbeddedDBTestSuite) TestDestination_Save() { // Create model tc.MockSQLDB.ExpectExec("INSERT INTO `"+tc.tablePrefix+"_destinations` ("+ "`created_at`,`updated_at`,`metadata`,`deleted_at`,`id`,`xpub_id`,`locking_script`,"+ - "`type`,`chain`,`num`,`address`,`draft_id`,`monitor`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?)").WithArgs( + 
"`type`,`chain`,`num`,`address`,`draft_id`) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)").WithArgs( tester.AnyTime{}, // created_at tester.AnyTime{}, // updated_at nil, // metadata @@ -472,7 +469,6 @@ func (ts *EmbeddedDBTestSuite) TestDestination_Save() { 0, // num destination.Address, // address testDraftID, // draft_id - nil, // monitor ).WillReturnResult(sqlmock.NewResult(1, 1)) // Commit the TX @@ -504,7 +500,7 @@ func (ts *EmbeddedDBTestSuite) TestDestination_Save() { tc.MockSQLDB.ExpectBegin() // Create model - tc.MockSQLDB.ExpectExec(`INSERT INTO "`+tc.tablePrefix+`_destinations" ("created_at","updated_at","metadata","deleted_at","id","xpub_id","locking_script","type","chain","num","address","draft_id","monitor") VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12,$13)`).WithArgs( + tc.MockSQLDB.ExpectExec(`INSERT INTO "`+tc.tablePrefix+`_destinations" ("created_at","updated_at","metadata","deleted_at","id","xpub_id","locking_script","type","chain","num","address","draft_id") VALUES ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10,$11,$12)`).WithArgs( tester.AnyTime{}, // created_at tester.AnyTime{}, // updated_at nil, // metadata @@ -517,7 +513,6 @@ func (ts *EmbeddedDBTestSuite) TestDestination_Save() { 0, // num destination.Address, // address testDraftID, // draft_id - nil, // monitor ).WillReturnResult(sqlmock.NewResult(1, 1)) // Commit the TX diff --git a/model_incoming_transactions_test.go b/model_incoming_transactions_test.go deleted file mode 100644 index 779a35ac..00000000 --- a/model_incoming_transactions_test.go +++ /dev/null @@ -1,72 +0,0 @@ -package bux - -import ( - "testing" - - "github.com/BuxOrg/bux/utils" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -// TestIncomingTransaction_GetModelName will test the method GetModelName() -func TestIncomingTransaction_GetModelName(t *testing.T) { - t.Parallel() - - bTx, err := newIncomingTransaction(testTxHex, New()) - require.NoError(t, err) - - assert.Equal(t, ModelIncomingTransaction.String(), 
bTx.GetModelName()) -} - -// TestProcessIncomingTransaction will test the method processIncomingTransaction() -func (ts *EmbeddedDBTestSuite) TestProcessIncomingTransaction() { - - for _, testCase := range dbTestCases { - ts.T().Run(testCase.name+" - LIVE integration test - valid external incoming tx", func(t *testing.T) { - - // todo: mock the response vs using a LIVE request for Chainstate - - tc := ts.genericDBClient(t, testCase.database, true, WithCustomChainstate(&chainStateEverythingOnChain{})) - defer tc.Close(tc.ctx) - - // Create a xpub - var err error - xPubKey := "xpub6826nizKsKjNvxGbcYPiyS4tLVB3nd3e4yujBe6YmqmNtN3DMytsQMkruEgHoyUu89CHcTtaeeLynTC19fD4JcAvKXBUbHi9qdeWtUMYCQK" - xPub := newXpub(xPubKey, append(tc.client.DefaultModelOptions(), New())...) - require.NotNil(t, xPub) - - err = xPub.Save(tc.ctx) - require.NoError(t, err) - - // Create a destination - var destination *Destination - destination, err = xPub.getNewDestination(tc.ctx, utils.ChainExternal, utils.ScriptTypePubKeyHash, tc.client.DefaultModelOptions()...) - require.NoError(t, err) - require.NotNil(t, destination) - - // Save the updated xPub and new destination - err = xPub.Save(tc.ctx) - require.NoError(t, err) - - // Record an external incoming tx - txHex := "0100000001574eacf3305f561f63d6f1896566d5ff63409fea2aae1534a3e3734191b47430020000006b483045022100e3f002e318d2dfae67f00da8aa327cc905e93d4a5adb5b7c33afde95bfc26acc022000ddfcdba500e0ba9eaadde478e2b6c6566f8d6837e7802c5f867492eadfe5d1412102ff596abfae0099d480d93937380af985f5165b84ad31790c10c09d3daab8562effffffff01493a1100000000001976a914ec8470c5d9275c39829b15ea7f1997cb66082d3188ac00000000" - var tx *Transaction - tx, err = tc.client.RecordTransaction(tc.ctx, xPubKey, txHex, "", tc.client.DefaultModelOptions()...) 
- require.NoError(t, err) - require.NotNil(t, tx) - - // Process if found - err = processIncomingTransactions(tc.ctx, nil, 5, WithClient(tc.client)) - require.NoError(t, err) - - // Check if the tx is found in the datastore - var foundTx *Transaction - foundTx, err = tc.client.GetTransaction(tc.ctx, xPub.ID, tx.ID) - require.NoError(t, err) - require.NotNil(t, foundTx) - - // Test that we found the tx on-chain(600000 is a height of a mocked tx) - assert.Equal(t, uint64(600000), foundTx.BlockHeight) - }) - } -} diff --git a/model_transaction_config_test.go b/model_transaction_config_test.go index 684ec62f..3e7d2fdc 100644 --- a/model_transaction_config_test.go +++ b/model_transaction_config_test.go @@ -15,7 +15,7 @@ import ( ) var ( - emptyConfigJSON = "{\"change_destinations\":[{\"created_at\":\"0001-01-01T00:00:00Z\",\"updated_at\":\"0001-01-01T00:00:00Z\",\"deleted_at\":null,\"id\":\"c775e7b757ede630cd0aa1113bd102661ab38829ca52a6422ab782862f268646\",\"xpub_id\":\"1a0b10d4eda0636aae1709e7e7080485a4d99af3ca2962c6e677cf5b53d8ab8c\",\"locking_script\":\"76a9147ff514e6ae3deb46e6644caac5cdd0bf2388906588ac\",\"type\":\"pubkeyhash\",\"chain\":1,\"num\":123,\"address\":\"1CfaQw9udYNPccssFJFZ94DN8MqNZm9nGt\",\"draft_id\":\"test-reference\",\"monitor\":null}],\"change_destinations_strategy\":\"\",\"change_minimum_satoshis\":0,\"change_number_of_destinations\":0,\"change_satoshis\":124,\"expires_in\":20000000000,\"fee\":12,\"fee_unit\":{\"satoshis\":1,\"bytes\":20},\"from_utxos\":null,\"include_utxos\":null,\"inputs\":null,\"outputs\":null,\"sync\":null}" + emptyConfigJSON = 
"{\"change_destinations\":[{\"created_at\":\"0001-01-01T00:00:00Z\",\"updated_at\":\"0001-01-01T00:00:00Z\",\"deleted_at\":null,\"id\":\"c775e7b757ede630cd0aa1113bd102661ab38829ca52a6422ab782862f268646\",\"xpub_id\":\"1a0b10d4eda0636aae1709e7e7080485a4d99af3ca2962c6e677cf5b53d8ab8c\",\"locking_script\":\"76a9147ff514e6ae3deb46e6644caac5cdd0bf2388906588ac\",\"type\":\"pubkeyhash\",\"chain\":1,\"num\":123,\"address\":\"1CfaQw9udYNPccssFJFZ94DN8MqNZm9nGt\",\"draft_id\":\"test-reference\"}],\"change_destinations_strategy\":\"\",\"change_minimum_satoshis\":0,\"change_number_of_destinations\":0,\"change_satoshis\":124,\"expires_in\":20000000000,\"fee\":12,\"fee_unit\":{\"satoshis\":1,\"bytes\":20},\"from_utxos\":null,\"include_utxos\":null,\"inputs\":null,\"outputs\":null,\"sync\":null}" opReturn = "006a2231394878696756345179427633744870515663554551797131707a5a56646f417574324b65657020616e20657965206f6e207468697320706c61636520666f7220736f6d65204a616d696679206c6f76652e2e2e200d746578742f6d61726b646f776e055554462d38" unsetConfigJSON = "{\"change_destinations\":null,\"change_destinations_strategy\":\"\",\"change_minimum_satoshis\":0,\"change_number_of_destinations\":0,\"change_satoshis\":0,\"expires_in\":0,\"fee\":0,\"fee_unit\":null,\"from_utxos\":null,\"include_utxos\":null,\"inputs\":null,\"outputs\":null,\"sync\":null}" diff --git a/model_transactions.go b/model_transactions.go index cb919cc1..a39d2eb9 100644 --- a/model_transactions.go +++ b/model_transactions.go @@ -128,21 +128,6 @@ func newTransactionWithDraftID(txHex, draftID string, opts ...ModelOps) (*Transa return tx, nil } -// newTransactionFromIncomingTransaction will start a new transaction model using an incomingTx -func newTransactionFromIncomingTransaction(incomingTx *IncomingTransaction) (*Transaction, error) { - // Create the base - tx, err := baseTxFromHex(incomingTx.Hex, incomingTx.GetOptions(true)...) 
- if err != nil { - return nil, err - } - - tx.rawXpubKey = incomingTx.rawXpubKey - tx.setXPubID() - tx.Metadata = incomingTx.Metadata - - return tx, nil -} - // setXPubID will set the xPub ID on the model func (m *Transaction) setXPubID() { if len(m.rawXpubKey) > 0 && len(m.XPubID) == 0 { diff --git a/model_transactions_test.go b/model_transactions_test.go index d2bfcc94..71363588 100644 --- a/model_transactions_test.go +++ b/model_transactions_test.go @@ -853,7 +853,7 @@ func TestEndToEndTransaction(t *testing.T) { var err error destinations := make([]*Destination, 2) destinations[0], err = client.NewDestination( - ctx, rawXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + ctx, rawXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, WithMetadatas(map[string]interface{}{ testMetadataKey: testMetadataValue, }), @@ -861,7 +861,7 @@ func TestEndToEndTransaction(t *testing.T) { require.NoError(t, err) destinations[1], err = client.NewDestination( - ctx, rawXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, false, + ctx, rawXPub, utils.ChainExternal, utils.ScriptTypePubKeyHash, WithMetadatas(map[string]interface{}{ testMetadataKey + "_2": testMetadataValue + "_2", }), @@ -880,12 +880,7 @@ func TestEndToEndTransaction(t *testing.T) { require.NoError(t, err) require.NotNil(t, transaction) require.Equal(t, "", transaction.DraftID) - require.Equal(t, SyncStatusProcessing, transaction.Status) - - // Get the transaction (now after processing) - transaction, err = client.GetTransaction(ctx, rawXPub, transaction.ID) - require.NoError(t, err) - require.NotNil(t, transaction) + require.Equal(t, SyncStatusComplete, transaction.Status) require.Equal(t, SyncStatusComplete, transaction.Status) assert.Equal(t, uint32(2), transaction.NumberOfOutputs) require.Equal(t, uint64(20000), transaction.TotalValue, transaction.TotalValue) diff --git a/models_test.go b/models_test.go index 1b7a4a61..694f2ef3 100644 --- a/models_test.go +++ b/models_test.go @@ -23,7 +23,6 @@ 
func TestModelName_String(t *testing.T) { assert.Equal(t, "block_header", ModelBlockHeader.String()) assert.Equal(t, "destination", ModelDestination.String()) assert.Equal(t, "empty", ModelNameEmpty.String()) - assert.Equal(t, "incoming_transaction", ModelIncomingTransaction.String()) assert.Equal(t, "metadata", ModelMetadata.String()) assert.Equal(t, "paymail_address", ModelPaymailAddress.String()) assert.Equal(t, "paymail_address", ModelPaymailAddress.String()) @@ -31,7 +30,7 @@ func TestModelName_String(t *testing.T) { assert.Equal(t, "transaction", ModelTransaction.String()) assert.Equal(t, "utxo", ModelUtxo.String()) assert.Equal(t, "xpub", ModelXPub.String()) - assert.Len(t, AllModelNames, 11) + assert.Len(t, AllModelNames, 10) }) } @@ -73,9 +72,6 @@ func TestModel_GetModelName(t *testing.T) { draftTx := DraftTransaction{} assert.Equal(t, ModelDraftTransaction.String(), *datastore.GetModelName(draftTx)) - incomingTx := IncomingTransaction{} - assert.Equal(t, ModelIncomingTransaction.String(), *datastore.GetModelName(incomingTx)) - paymailAddress := PaymailAddress{} assert.Equal(t, ModelPaymailAddress.String(), *datastore.GetModelName(paymailAddress)) @@ -111,9 +107,6 @@ func TestModel_GetModelTableName(t *testing.T) { draftTx := DraftTransaction{} assert.Equal(t, tableDraftTransactions, *datastore.GetModelTableName(draftTx)) - incomingTx := IncomingTransaction{} - assert.Equal(t, tableIncomingTransactions, *datastore.GetModelTableName(incomingTx)) - paymailAddress := PaymailAddress{} assert.Equal(t, tablePaymailAddresses, *datastore.GetModelTableName(paymailAddress)) diff --git a/monitor.go b/monitor.go deleted file mode 100644 index 23fd04ca..00000000 --- a/monitor.go +++ /dev/null @@ -1,103 +0,0 @@ -package bux - -import ( - "bufio" - "context" - "errors" - "fmt" - "os" - "time" - - "github.com/BuxOrg/bux/chainstate" - "github.com/BuxOrg/bux/cluster" - "github.com/BuxOrg/bux/utils" - "github.com/mrz1836/go-datastore" -) - -// destinationMonitor is the 
struct of responses for Monitoring -type destinationMonitor struct { - LockingScript string `json:"locking_script" toml:"locking_script" yaml:"locking_script" bson:"locking_script"` -} - -// loadMonitoredDestinations will load destinations that should be monitored -func loadMonitoredDestinations(ctx context.Context, client ClientInterface, monitor chainstate.MonitorService) error { - - // Create conditions using the max monitor days - conditions := map[string]interface{}{ - "monitor": map[string]interface{}{ - "$gt": time.Now().Add(time.Duration(-24*monitor.GetMonitorDays()) * time.Hour), - }, - } - - // Create monitor query with max destinations - queryParams := &datastore.QueryParams{ - Page: 1, - PageSize: monitor.GetMaxNumberOfDestinations(), - OrderByField: "monitor", - SortDirection: "desc", - } - - // Get all destinations that match the query - var destinations []*destinationMonitor - if err := client.Datastore().GetModels( - ctx, &[]*Destination{}, conditions, queryParams, &destinations, defaultDatabaseReadTimeout, - ); err != nil && !errors.Is(err, datastore.ErrNoResults) { - return err - } - - // Loop all destinations and add to Monitor - for _, model := range destinations { - if err := monitor.Processor().Add(utils.P2PKHRegexpString, model.LockingScript); err != nil { - return err - } - } - - // Debug line - if client.IsDebug() && client.Logger() != nil { - client.Logger().Info().Msgf("[MONITOR] Added %d destinations to monitor with hash %s", - len(destinations), monitor.Processor().GetHash(), - ) - } - - return nil -} - -// startDefaultMonitor will create a handler, start monitor, and store the first heartbeat -func startDefaultMonitor(ctx context.Context, client ClientInterface, monitor chainstate.MonitorService) error { - - if client.Chainstate().Monitor().LoadMonitoredDestinations() { - if err := loadMonitoredDestinations(ctx, client, monitor); err != nil { - return err - } - } - - _, err := client.Cluster().Subscribe(cluster.DestinationNew, 
func(data string) { - if monitor.IsDebug() { - monitor.Logger().Info().Msgf("[MONITOR] added %s destination to monitor: %s", utils.P2PKHRegexpString, data) - } - if err := monitor.Processor().Add(utils.P2PKHRegexpString, data); err != nil { - client.Logger().Error().Msg("could not add destination to monitor") - } - }) - if err != nil { - return err - } - - if monitor.IsDebug() { - // capture keyboard input and allow start and stop of the monitor - go func() { - scanner := bufio.NewScanner(os.Stdin) - for scanner.Scan() { - text := scanner.Text() - fmt.Printf("KEYBOARD input: %s\n", text) - if text == "e" { - if err = monitor.Stop(ctx); err != nil { - fmt.Printf("ERROR: %s\n", err.Error()) - } - } - } - }() - } - - return nil -} diff --git a/monitor_event_handler.go b/monitor_event_handler.go deleted file mode 100644 index 7c377861..00000000 --- a/monitor_event_handler.go +++ /dev/null @@ -1,460 +0,0 @@ -package bux - -import ( - "context" - "encoding/hex" - "encoding/json" - "fmt" - "runtime" - "strings" - "sync" - "time" - - "github.com/BuxOrg/bux/chainstate" - "github.com/centrifugal/centrifuge-go" - "github.com/korovkin/limiter" - "github.com/libsv/go-bc" - "github.com/libsv/go-bt/v2" - "github.com/rs/zerolog" -) - -// MonitorEventHandler for handling transaction events from a monitor -type MonitorEventHandler struct { - blockSyncChannel chan bool - buxClient ClientInterface - ctx context.Context - debug bool - limit *limiter.ConcurrencyLimiter - logger *zerolog.Logger - monitor chainstate.MonitorService -} - -type blockSubscriptionHandler struct { - buxClient ClientInterface - ctx context.Context - debug bool - errors []error - logger *zerolog.Logger - monitor chainstate.MonitorService - wg sync.WaitGroup - unsubscribed bool -} - -func (b *blockSubscriptionHandler) OnPublish(subscription *centrifuge.Subscription, e centrifuge.PublishEvent) { - channelName := subscription.Channel() - if strings.HasPrefix(channelName, "block:sync:") { - // block subscription - 
tx, err := b.monitor.Processor().FilterTransactionPublishEvent(e.Data) - if err != nil { - b.errors = append(b.errors, err) - b.logger.Error().Msgf("[MONITOR] processing block data: %s", err.Error()) - } - - if tx == "" { - return - } - - if _, err = b.buxClient.RecordRawTransaction(b.ctx, tx); err != nil { - // must not override err - btTx, btErr := bt.NewTxFromString(tx) - if btErr != nil { - b.logger.Error().Msgf("[MONITOR] could not parse transaction: %v", btErr) - return - } - - b.logger.Error().Msgf("[MONITOR] recording tx %s: %v", btTx.TxID(), err) - b.errors = append(b.errors, err) - return - } - - if b.debug { - b.logger.Info().Msgf("[MONITOR] successfully recorded tx: %v", tx) - } - } -} - -func (b *blockSubscriptionHandler) OnUnsubscribe(subscription *centrifuge.Subscription, _ centrifuge.UnsubscribeEvent) { - b.logger.Info().Msgf("[MONITOR] OnUnsubscribe: %s", subscription.Channel()) - - // close wait group - if !b.unsubscribed { - b.wg.Done() - b.unsubscribed = true - } -} - -// NewMonitorHandler create a new monitor handler -func NewMonitorHandler(ctx context.Context, buxClient ClientInterface, monitor chainstate.MonitorService) MonitorEventHandler { - return MonitorEventHandler{ - blockSyncChannel: make(chan bool), - buxClient: buxClient, - ctx: ctx, - debug: monitor.IsDebug(), - limit: limiter.NewConcurrencyLimiter(runtime.NumCPU()), - logger: monitor.Logger(), - monitor: monitor, - } -} - -// OnConnect event when connected -func (h *MonitorEventHandler) OnConnect(client *centrifuge.Client, e centrifuge.ConnectEvent) { - h.logger.Info().Msgf("[MONITOR] Connected to server: %s", e.ClientID) - - agentClient := &chainstate.AgentClient{ - Client: client, - } - filters := h.monitor.Processor().GetFilters() - for regex, bloomFilter := range filters { - if _, err := agentClient.SetFilter(regex, bloomFilter); err != nil { - h.logger.Error().Msgf("[MONITOR] processing mempool: %s", err.Error()) - } - } - - h.logger.Info().Msg("[MONITOR] PROCESS BLOCK 
HEADERS") - if err := h.ProcessBlockHeaders(h.ctx, client); err != nil { - h.logger.Error().Msgf("[MONITOR] processing block headers: %s", err.Error()) - } - - h.logger.Info().Msg("[MONITOR] PROCESS BLOCKS") - h.blockSyncChannel = make(chan bool) - go func() { - ctx := context.Background() - if err := h.ProcessBlocks(ctx, client, h.blockSyncChannel); err != nil { - h.logger.Error().Msgf("[MONITOR] processing blocks: %s", err.Error()) - } - }() - - h.monitor.Connected() -} - -// ProcessBlocks processes all transactions in blocks that have not yet been synced -func (h *MonitorEventHandler) ProcessBlocks(ctx context.Context, client *centrifuge.Client, blockChannel chan bool) error { - h.logger.Info().Msg("[MONITOR] ProcessBlocks start") - for { - // Check if channel has been closed - select { - case <-blockChannel: - h.logger.Info().Msg("[MONITOR] block sync channel closed, stopping ProcessBlocks") - return nil - default: - // get all block headers that have not been marked as synced - blockHeaders, err := h.buxClient.GetUnsyncedBlockHeaders(ctx) - if err != nil { - h.logger.Error().Msg(err.Error()) - } else { - h.logger.Info().Msgf("[MONITOR] processing block headers: %d", len(blockHeaders)) - for _, blockHeader := range blockHeaders { - h.logger.Info().Msgf("[MONITOR] Processing block %d: %s", blockHeader.Height, blockHeader.ID) - handler := &blockSubscriptionHandler{ - buxClient: h.buxClient, - ctx: ctx, - debug: h.debug, - logger: h.logger, - monitor: h.monitor, - } - - var subscription *centrifuge.Subscription - subscription, err = client.NewSubscription("block:sync:" + blockHeader.ID) - if err != nil { - h.logger.Error().Msg(err.Error()) - } else { - h.logger.Info().Msgf("[MONITOR] Starting block subscription: %v", subscription) - subscription.OnPublish(handler) - subscription.OnUnsubscribe(handler) - - handler.wg.Add(1) - if err = subscription.Subscribe(); err != nil { - h.logger.Error().Msg(err.Error()) - } else { - h.logger.Info().Msg("[MONITOR] Waiting for 
wait group to finish") - handler.wg.Wait() - - _ = subscription.Close() - - if len(handler.errors) <= 0 { - // save that block header has been synced - blockHeader.Synced.Valid = true - blockHeader.Synced.Time = time.Now() - if err = blockHeader.Save(ctx); err != nil { - h.logger.Error().Msg(err.Error()) - } - } - } - } - } - } - - time.Sleep(defaultSleepForNewBlockHeaders) - } - } -} - -// ProcessBlockHeaders processes all missing block headers -func (h *MonitorEventHandler) ProcessBlockHeaders(ctx context.Context, client *centrifuge.Client) error { - lastBlockHeader, err := h.buxClient.GetLastBlockHeader(ctx) - if err != nil { - h.logger.Error().Msg(err.Error()) - return err - } - if lastBlockHeader == nil { - h.logger.Info().Msg("no last block header found, skipping...") - return nil - } - var subscription *centrifuge.Subscription - subscription, err = client.NewSubscription("block:headers:history:" + fmt.Sprint(lastBlockHeader.Height)) - if err != nil { - h.logger.Error().Msg(err.Error()) - } else { - h.logger.Info().Msgf("[MONITOR] Starting block header subscription: %v", subscription) - subscription.OnPublish(h) - if err = subscription.Subscribe(); err != nil { - h.logger.Error().Msg(err.Error()) - } - } - - return nil -} - -// OnError on error event -func (h *MonitorEventHandler) OnError(_ *centrifuge.Client, e centrifuge.ErrorEvent) { - h.logger.Error().Msgf("[MONITOR] Error: %s", e.Message) -} - -// OnMessage on new message event -func (h *MonitorEventHandler) OnMessage(_ *centrifuge.Client, e centrifuge.MessageEvent) { - var data map[string]interface{} - err := json.Unmarshal(e.Data, &data) - if err != nil { - h.logger.Error().Msgf("[MONITOR] failed unmarshalling data: %s", err.Error()) - } - - if _, ok := data["time"]; !ok { - h.logger.Error().Msgf("[MONITOR] OnMessage: %v", data) - } -} - -// OnDisconnect when disconnected -func (h *MonitorEventHandler) OnDisconnect(_ *centrifuge.Client, _ centrifuge.DisconnectEvent) { - defer close(h.blockSyncChannel) 
- - defer func(logger *zerolog.Logger) { - rec := recover() - if rec != nil { - logger.Error().Msgf("[MONITOR] Tried closing a closed channel: %v", rec) - } - }(h.logger) - - h.monitor.Disconnected() -} - -// OnJoin event when joining a server -func (h *MonitorEventHandler) OnJoin(_ *centrifuge.Subscription, e centrifuge.JoinEvent) { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnJoin: %v", e) - } -} - -// OnLeave event when leaving a server -func (h *MonitorEventHandler) OnLeave(_ *centrifuge.Subscription, e centrifuge.LeaveEvent) { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnLeave: %v", e) - } -} - -// OnPublish on publish event -func (h *MonitorEventHandler) OnPublish(subscription *centrifuge.Subscription, e centrifuge.PublishEvent) { - channelName := subscription.Channel() - - if strings.HasPrefix(channelName, "block:headers:history:") { - bi := chainstate.BlockInfo{} - err := json.Unmarshal(e.Data, &bi) - if err != nil { - h.logger.Error().Msgf("[MONITOR] unmarshalling block header: %v", err) - return - } - - var existingBlock *BlockHeader - if existingBlock, err = h.buxClient.GetBlockHeaderByHeight(h.ctx, uint32(bi.Height)); err != nil { - h.logger.Error().Msgf("[MONITOR] getting block header by height: %v", err) - } - - if existingBlock == nil { - merkleRoot, _ := hex.DecodeString(bi.MerkleRoot) - previousBlockHash, _ := hex.DecodeString(bi.PreviousBlockHash) - - bh := bc.BlockHeader{ - Bits: []byte(bi.Bits), - HashMerkleRoot: merkleRoot, - HashPrevBlock: previousBlockHash, - Nonce: uint32(bi.Nonce), - Time: uint32(bi.Time), - Version: uint32(bi.Version), - } - - if _, err = h.buxClient.RecordBlockHeader( - h.ctx, bi.Hash, uint32(bi.Height), bh, - ); err != nil { - h.logger.Error().Msgf("[MONITOR] recording block header: %v", err) - return - } - } - } else { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnPublish: %v", e.Data) - } - } -} - -// OnServerSubscribe on server subscribe event -func (h *MonitorEventHandler) OnServerSubscribe(_ 
*centrifuge.Client, e centrifuge.ServerSubscribeEvent) { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnServerSubscribe: %v", e) - } -} - -// OnServerUnsubscribe on the unsubscribe event -func (h *MonitorEventHandler) OnServerUnsubscribe(_ *centrifuge.Client, e centrifuge.ServerUnsubscribeEvent) { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnServerUnsubscribe: %v", e) - } -} - -// OnSubscribeSuccess on subscribe success -func (h *MonitorEventHandler) OnSubscribeSuccess(_ *centrifuge.Subscription, e centrifuge.SubscribeSuccessEvent) { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnSubscribeSuccess: %v", e) - } -} - -// OnSubscribeError is for an error -func (h *MonitorEventHandler) OnSubscribeError(_ *centrifuge.Subscription, e centrifuge.SubscribeErrorEvent) { - h.logger.Error().Msgf("[MONITOR] OnSubscribeError: %v", e) -} - -// OnUnsubscribe will unsubscribe -func (h *MonitorEventHandler) OnUnsubscribe(_ *centrifuge.Subscription, e centrifuge.UnsubscribeEvent) { - if h.debug { - h.logger.Info().Msgf("[MONITOR] OnUnsubscribe: %v", e) - } -} - -// OnServerJoin event when joining a server -func (h *MonitorEventHandler) OnServerJoin(_ *centrifuge.Client, e centrifuge.ServerJoinEvent) { - h.logger.Info().Msgf("[MONITOR] Joined server: %v", e) -} - -// OnServerLeave event when leaving a server -func (h *MonitorEventHandler) OnServerLeave(_ *centrifuge.Client, e centrifuge.ServerLeaveEvent) { - h.logger.Info().Msgf("[MONITOR] Left server: %v", e) -} - -// OnServerPublish on server publish event -func (h *MonitorEventHandler) OnServerPublish(c *centrifuge.Client, e centrifuge.ServerPublishEvent) { - h.logger.Info().Msgf("[MONITOR] Server publish to channel %s with data %v", e.Channel, string(e.Data)) - // todo make this configurable - // h.onServerPublishLinear(c, e) - h.onServerPublishParallel(c, e) -} - -func (h *MonitorEventHandler) processMempoolPublish(_ *centrifuge.Client, e centrifuge.ServerPublishEvent) { - tx, err := 
h.monitor.Processor().FilterTransactionPublishEvent(e.Data) - if err != nil { - h.logger.Error().Msgf("[MONITOR] failed to process server event: %v", err) - return - } - - if h.monitor.SaveDestinations() { - // Process transaction and save outputs - // todo: replace printf - fmt.Printf("Should save the destination here...\n") - } - - if tx == "" { - return - } - if _, err = h.buxClient.RecordRawTransaction(h.ctx, tx); err != nil { - h.logger.Error().Msgf("[MONITOR] recording tx: %v", err) - return - } - - if h.debug { - h.logger.Info().Msgf("[MONITOR] successfully recorded tx: %v", tx) - } -} - -func (h *MonitorEventHandler) processBlockHeaderPublish(client *centrifuge.Client, e centrifuge.ServerPublishEvent) { - bi := chainstate.BlockInfo{} - err := json.Unmarshal(e.Data, &bi) - if err != nil { - h.logger.Error().Msgf("[MONITOR] unmarshalling block header: %v", err) - return - } - merkleRoot, _ := hex.DecodeString(bi.MerkleRoot) - previousBlockHash, _ := hex.DecodeString(bi.PreviousBlockHash) - bh := bc.BlockHeader{ - HashPrevBlock: previousBlockHash, - HashMerkleRoot: merkleRoot, - Nonce: uint32(bi.Nonce), - Version: uint32(bi.Version), - Time: uint32(bi.Time), - Bits: []byte(bi.Bits), - } - - height := uint32(bi.Height) - var previousBlockHeader *BlockHeader - previousBlockHeader, err = getBlockHeaderByHeight(h.ctx, height-1, h.buxClient.DefaultModelOptions()...) 
- if err != nil { - h.logger.Error().Msgf("[MONITOR] retreiving previous block header: %v", err) - return - } - if previousBlockHeader == nil { - h.logger.Error().Msgf("[MONITOR] ERROR Previous block header not found: %d", height-1) - if err = h.ProcessBlockHeaders(h.ctx, client); err != nil { - h.logger.Error().Msgf("[MONITOR] processing block headers: %s", err.Error()) - } - return - } - - if _, err = h.buxClient.RecordBlockHeader(h.ctx, bi.Hash, height, bh); err != nil { - h.logger.Error().Msgf("[MONITOR] recording block header: %v", err) - return - } - - if h.debug { - h.logger.Info().Msgf("[MONITOR] successfully recorded blockheader: %v", bi.Hash) - } -} - -func (h *MonitorEventHandler) onServerPublishLinear(c *centrifuge.Client, e centrifuge.ServerPublishEvent) { - switch e.Channel { - case "mempool:transactions": - h.processMempoolPublish(c, e) - case "block:headers": - h.processBlockHeaderPublish(c, e) - } -} - -func (h *MonitorEventHandler) onServerPublishParallel(c *centrifuge.Client, e centrifuge.ServerPublishEvent) { - _, err := h.limit.Execute(func() { - h.onServerPublishLinear(c, e) - }) - if err != nil { - h.logger.Error().Msgf("[MONITOR] failed to start goroutine: %v", err) - } -} - -// SetMonitor sets the monitor for the given handler -func (h *MonitorEventHandler) SetMonitor(monitor *chainstate.Monitor) { - h.monitor = monitor -} - -// RecordTransaction records a transaction into bux -func (h *MonitorEventHandler) RecordTransaction(ctx context.Context, txHex string) error { - _, err := h.buxClient.RecordRawTransaction(ctx, txHex) - return err -} - -// RecordBlockHeader records a block header into bux -func (h *MonitorEventHandler) RecordBlockHeader(_ context.Context, _ bc.BlockHeader) error { - return nil -} diff --git a/paymail_service_provider.go b/paymail_service_provider.go index 5c8bd1b7..2c28f8cd 100644 --- a/paymail_service_provider.go +++ b/paymail_service_provider.go @@ -2,12 +2,8 @@ package bux import ( "context" - "database/sql" 
"encoding/hex" "fmt" - "reflect" - "time" - "github.com/BuxOrg/bux/chainstate" "github.com/BuxOrg/bux/utils" "github.com/bitcoin-sv/go-paymail" @@ -16,7 +12,7 @@ import ( "github.com/bitcoin-sv/go-paymail/spv" "github.com/bitcoinschema/go-bitcoin/v2" "github.com/libsv/go-bk/bec" - customTypes "github.com/mrz1836/go-datastore/custom_types" + "reflect" ) // PaymailDefaultServiceProvider is an interface for overriding the paymail actions in go-paymail/server @@ -89,7 +85,7 @@ func (p *PaymailDefaultServiceProvider) CreateAddressResolutionResponse( return nil, err } destination, err := createDestination( - ctx, paymailAddress, pubKey, true, append(p.client.DefaultModelOptions(), WithMetadatas(metadata))..., + ctx, paymailAddress, pubKey, append(p.client.DefaultModelOptions(), WithMetadatas(metadata))..., ) if err != nil { return nil, err @@ -127,7 +123,7 @@ func (p *PaymailDefaultServiceProvider) CreateP2PDestinationResponse( return nil, err } destination, err = createDestination( - ctx, paymailAddress, pubKey, false, append(p.client.DefaultModelOptions(), WithMetadatas(metadata))..., + ctx, paymailAddress, pubKey, append(p.client.DefaultModelOptions(), WithMetadatas(metadata))..., ) if err != nil { return nil, err @@ -244,7 +240,7 @@ func getXpubForPaymail(ctx context.Context, client ClientInterface, paymailAddre ) } -func createDestination(ctx context.Context, paymailAddress *PaymailAddress, pubKey *derivedPubKey, monitor bool, opts ...ModelOps) (destination *Destination, err error) { +func createDestination(ctx context.Context, paymailAddress *PaymailAddress, pubKey *derivedPubKey, opts ...ModelOps) (destination *Destination, err error) { lockingScript, err := createLockingScript(pubKey.ecPubKey) if err != nil { return nil, err @@ -256,14 +252,6 @@ func createDestination(ctx context.Context, paymailAddress *PaymailAddress, pubK destination.Chain = utils.ChainExternal destination.Num = pubKey.chainNum - // Only on for basic address resolution, not enabled for p2p - 
if monitor { - destination.Monitor = customTypes.NullTime{NullTime: sql.NullTime{ - Valid: true, - Time: time.Now(), - }} - } - if err = destination.Save(ctx); err != nil { return nil, err } diff --git a/record_tx_strategy_external_incoming_tx.go b/record_tx_strategy_external_incoming_tx.go index b4b68b65..758f69bf 100644 --- a/record_tx_strategy_external_incoming_tx.go +++ b/record_tx_strategy_external_incoming_tx.go @@ -16,12 +16,6 @@ type externalIncomingTx struct { func (strategy *externalIncomingTx) Execute(ctx context.Context, c ClientInterface, opts []ModelOps) (*Transaction, error) { logger := c.Logger() - - // process - if !strategy.broadcastNow && c.IsITCEnabled() { // do not save transaction to database now, save IncomingTransaction instead and let task manager handle and process it - return _addTxToCheck(ctx, strategy, c, opts) - } - transaction, err := _createExternalTxToRecord(ctx, strategy, c, opts) if err != nil { return nil, fmt.Errorf("creation of external incoming tx failed. Reason: %w", err) @@ -79,31 +73,6 @@ func (strategy *externalIncomingTx) FailOnBroadcastError(forceFail bool) { strategy.allowBroadcastErrors = !forceFail } -func _addTxToCheck(ctx context.Context, tx *externalIncomingTx, c ClientInterface, opts []ModelOps) (*Transaction, error) { - logger := c.Logger() - - incomingTx, err := newIncomingTransaction(tx.Hex, c.DefaultModelOptions(append(opts, New())...)...) - if err != nil { - return nil, fmt.Errorf("tx creation failed. Reason: %w", err) - } - - logger.Info(). - Str("txID", incomingTx.ID). - Msg("start ITC") - - if err = incomingTx.Save(ctx); err != nil { - return nil, fmt.Errorf("adding new IncomingTx to check queue failed. Reason: %w", err) - } - - result := incomingTx.toTransactionDto() - result.Status = statusProcessing - - logger.Info(). - Str("txID", incomingTx.ID). 
- Msg("complete ITC") - return result, nil -} - func _createExternalTxToRecord(ctx context.Context, eTx *externalIncomingTx, c ClientInterface, opts []ModelOps) (*Transaction, error) { // Create NEW tx model tx, err := txFromHex(eTx.Hex, c.DefaultModelOptions(append(opts, New())...)...) From a75a2d47b4b1230417da69733ce5695a2f1f7be2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Lewandowski?= <35259896+pawellewandowski98@users.noreply.github.com> Date: Tue, 16 Jan 2024 13:13:39 +0100 Subject: [PATCH 02/13] chore(BUX-417): remove address resolution methods (#537) --- client.go | 1 - client_internal.go | 5 - client_options.go | 12 +- client_paymail.go | 8 - client_test.go | 23 --- definitions.go | 19 +-- examples/client/mysql/mysql.go | 2 +- .../client/paymail_support/paymail_support.go | 1 - model_draft_transactions.go | 3 - model_transaction_config.go | 52 +----- model_transaction_config_test.go | 51 +----- paymail.go | 57 +------ paymail_test.go | 157 ------------------ 13 files changed, 26 insertions(+), 365 deletions(-) diff --git a/client.go b/client.go index 1da30707..a031b2cf 100644 --- a/client.go +++ b/client.go @@ -106,7 +106,6 @@ type ( *server.Configuration // Server configuration if Paymail is enabled options []server.ConfigOps // Options for the paymail server DefaultFromPaymail string // IE: from@domain.com - DefaultNote string // IE: some note for address resolution } // taskManagerOptions holds the configuration for taskmanager diff --git a/client_internal.go b/client_internal.go index e7ed26ce..8b746878 100644 --- a/client_internal.go +++ b/client_internal.go @@ -161,11 +161,6 @@ func (c *Client) loadDefaultPaymailConfig() (err error) { c.options.paymail.serverConfig.DefaultFromPaymail = defaultSenderPaymail } - // Default note for address resolution - if len(c.options.paymail.serverConfig.DefaultNote) == 0 { - c.options.paymail.serverConfig.DefaultNote = defaultAddressResolutionPurpose - } - // Set default options if none are found if 
len(c.options.paymail.serverConfig.options) == 0 { c.options.paymail.serverConfig.options = append(c.options.paymail.serverConfig.options, diff --git a/client_options.go b/client_options.go index 615b2cd8..86b97bd1 100644 --- a/client_options.go +++ b/client_options.go @@ -459,9 +459,7 @@ func WithPaymailClient(client paymail.ClientInterface) ClientOps { } // WithPaymailSupport will set the configuration for Paymail support (as a server) -func WithPaymailSupport(domains []string, defaultFromPaymail, defaultNote string, - domainValidation, senderValidation bool, -) ClientOps { +func WithPaymailSupport(domains []string, defaultFromPaymail string, domainValidation, senderValidation bool) ClientOps { return func(c *clientOptions) { // Add generic capabilities c.paymail.serverConfig.options = append(c.paymail.serverConfig.options, server.WithP2PCapabilities()) @@ -485,9 +483,6 @@ func WithPaymailSupport(domains []string, defaultFromPaymail, defaultNote string if len(defaultFromPaymail) > 0 { c.paymail.serverConfig.DefaultFromPaymail = defaultFromPaymail } - if len(defaultNote) > 0 { - c.paymail.serverConfig.DefaultNote = defaultNote - } // Add the paymail_address model in bux c.addModels(migrateList, newPaymail("")) @@ -509,7 +504,7 @@ func WithPaymailBeefSupport(pulseURL, pulseAuthToken string) ClientOps { // WithPaymailServerConfig will set the custom server configuration for Paymail // // This will allow overriding the Configuration.actions (paymail service provider) -func WithPaymailServerConfig(config *server.Configuration, defaultFromPaymail, defaultNote string) ClientOps { +func WithPaymailServerConfig(config *server.Configuration, defaultFromPaymail string) ClientOps { return func(c *clientOptions) { if config != nil { c.paymail.serverConfig.Configuration = config @@ -517,9 +512,6 @@ func WithPaymailServerConfig(config *server.Configuration, defaultFromPaymail, d if len(defaultFromPaymail) > 0 { c.paymail.serverConfig.DefaultFromPaymail = defaultFromPaymail } - 
if len(defaultNote) > 0 { - c.paymail.serverConfig.DefaultNote = defaultNote - } // Add the paymail_address model in bux c.addModels(migrateList, newPaymail("")) diff --git a/client_paymail.go b/client_paymail.go index f51df601..e6fafb33 100644 --- a/client_paymail.go +++ b/client_paymail.go @@ -33,14 +33,6 @@ func (p *paymailOptions) FromSender() string { return defaultSenderPaymail } -// Note will return either the configuration value or the application default -func (p *paymailOptions) Note() string { - if len(p.serverConfig.DefaultNote) > 0 { - return p.serverConfig.DefaultNote - } - return defaultAddressResolutionPurpose -} - // ServerConfig will return the Paymail Server configuration from the options struct func (p *paymailOptions) ServerConfig() *PaymailServerOptions { return p.serverConfig diff --git a/client_test.go b/client_test.go index 28ef625b..c3ecfad8 100644 --- a/client_test.go +++ b/client_test.go @@ -165,7 +165,6 @@ func TestClient_GetPaymailConfig(t *testing.T) { opts = append(opts, WithPaymailSupport( []string{testDomain}, defaultSenderPaymail, - defaultAddressResolutionPurpose, false, false, )) @@ -223,27 +222,6 @@ func TestPaymailOptions_FromSender(t *testing.T) { }) } -// TestPaymailOptions_Note will test the method Note() -func TestPaymailOptions_Note(t *testing.T) { - t.Parallel() - - t.Run("no note, use default", func(t *testing.T) { - p := &paymailOptions{ - serverConfig: &PaymailServerOptions{}, - } - assert.Equal(t, defaultAddressResolutionPurpose, p.Note()) - }) - - t.Run("custom note set", func(t *testing.T) { - p := &paymailOptions{ - serverConfig: &PaymailServerOptions{ - DefaultNote: "from this person", - }, - } - assert.Equal(t, "from this person", p.Note()) - }) -} - // TestPaymailOptions_ServerConfig will test the method ServerConfig() func TestPaymailOptions_ServerConfig(t *testing.T) { // t.Parallel() @@ -259,7 +237,6 @@ func TestPaymailOptions_ServerConfig(t *testing.T) { opts = append(opts, WithPaymailSupport( 
[]string{testDomain}, defaultSenderPaymail, - defaultAddressResolutionPurpose, false, false, ), WithLogger(&logger)) diff --git a/definitions.go b/definitions.go index 4e572112..f23b6bc8 100644 --- a/definitions.go +++ b/definitions.go @@ -106,16 +106,15 @@ const ( statusSkipped = "skipped" // Paymail / Handles - cacheKeyAddressResolution = "paymail-address-resolution-" - cacheKeyCapabilities = "paymail-capabilities-" - cacheTTLAddressResolution = 2 * time.Minute - cacheTTLCapabilities = 60 * time.Minute - defaultAddressResolutionPurpose = "Created with BUX: getbux.io" - defaultSenderPaymail = "buxorg@moneybutton.com" - handleHandcashPrefix = "$" - handleMaxLength = 25 - handleRelayPrefix = "1" - p2pMetadataField = "p2p_tx_metadata" + cacheKeyAddressResolution = "paymail-address-resolution-" + cacheKeyCapabilities = "paymail-capabilities-" + cacheTTLAddressResolution = 2 * time.Minute + cacheTTLCapabilities = 60 * time.Minute + defaultSenderPaymail = "buxorg@bux.com" + handleHandcashPrefix = "$" + handleMaxLength = 25 + handleRelayPrefix = "1" + p2pMetadataField = "p2p_tx_metadata" // Misc gormTypeText = "text" diff --git a/examples/client/mysql/mysql.go b/examples/client/mysql/mysql.go index bd65566b..3aaa0feb 100644 --- a/examples/client/mysql/mysql.go +++ b/examples/client/mysql/mysql.go @@ -33,7 +33,7 @@ func main() { TxTimeout: defaultTimeouts, User: os.Getenv("DB_USER"), }), - bux.WithPaymailSupport([]string{"test.com"}, "example@test.com", "Example note", false, false), + bux.WithPaymailSupport([]string{"test.com"}, "example@test.com", false, false), bux.WithAutoMigrate(bux.BaseModels...), ) if err != nil { diff --git a/examples/client/paymail_support/paymail_support.go b/examples/client/paymail_support/paymail_support.go index 11379cdc..5b4e5701 100644 --- a/examples/client/paymail_support/paymail_support.go +++ b/examples/client/paymail_support/paymail_support.go @@ -13,7 +13,6 @@ func main() { bux.WithPaymailSupport( []string{"test.com"}, "from@test.com", 
- "some default note", true, false, ), ) diff --git a/model_draft_transactions.go b/model_draft_transactions.go index ada89589..3abebc70 100644 --- a/model_draft_transactions.go +++ b/model_draft_transactions.go @@ -173,7 +173,6 @@ func (m *DraftTransaction) processConfigOutputs(ctx context.Context) error { ctx, c.Cachestore(), c.PaymailClient(), paymailFrom, - c.GetPaymailConfig().DefaultNote, false, ); err != nil { return err @@ -186,7 +185,6 @@ func (m *DraftTransaction) processConfigOutputs(ctx context.Context) error { ctx, c.Cachestore(), c.PaymailClient(), paymailFrom, - c.GetPaymailConfig().DefaultNote, true, ); err != nil { return err @@ -207,7 +205,6 @@ func (m *DraftTransaction) processConfigOutputs(ctx context.Context) error { ctx, c.Cachestore(), c.PaymailClient(), paymailFrom, - c.GetPaymailConfig().DefaultNote, true, ); err != nil { return err diff --git a/model_transaction_config.go b/model_transaction_config.go index 5fe3e2b0..f4aafd2f 100644 --- a/model_transaction_config.go +++ b/model_transaction_config.go @@ -163,7 +163,7 @@ func (t TransactionConfig) Value() (driver.Value, error) { // processOutput will inspect the output to determine how to process func (t *TransactionOutput) processOutput(ctx context.Context, cacheStore cachestore.ClientInterface, - paymailClient paymail.ClientInterface, defaultFromSender, defaultNote string, checkSatoshis bool, + paymailClient paymail.ClientInterface, defaultFromSender string, checkSatoshis bool, ) error { // Convert known handle formats ($handcash or 1relayx) if strings.Contains(t.To, handleHandcashPrefix) || @@ -180,7 +180,7 @@ func (t *TransactionOutput) processOutput(ctx context.Context, cacheStore caches if checkSatoshis && t.Satoshis <= 0 { return ErrOutputValueTooLow } - return t.processPaymailOutput(ctx, cacheStore, paymailClient, defaultFromSender, defaultNote) + return t.processPaymailOutput(ctx, cacheStore, paymailClient, defaultFromSender) } else if len(t.To) > 0 { // Standard Bitcoin Address if 
checkSatoshis && t.Satoshis <= 0 { return ErrOutputValueTooLow @@ -198,7 +198,7 @@ func (t *TransactionOutput) processOutput(ctx context.Context, cacheStore caches // processPaymailOutput will detect how to process the Paymail output given func (t *TransactionOutput) processPaymailOutput(ctx context.Context, cacheStore cachestore.ClientInterface, - paymailClient paymail.ClientInterface, fromPaymail, defaultNote string, + paymailClient paymail.ClientInterface, fromPaymail string, ) error { // Standardize the paymail address (break into parts) alias, domain, paymailAddress := paymail.SanitizePaymail(t.To) @@ -236,51 +236,7 @@ func (t *TransactionOutput) processPaymailOutput(ctx context.Context, cacheStore ) } - // Default is resolving using the deprecated address resolution method - return t.processPaymailViaAddressResolution( - ctx, cacheStore, paymailClient, capabilities, - fromPaymail, defaultNote, - ) -} - -// processPaymailViaAddressResolution will use a deprecated way to resolve a Paymail address -func (t *TransactionOutput) processPaymailViaAddressResolution(ctx context.Context, cacheStore cachestore.ClientInterface, - paymailClient paymail.ClientInterface, capabilities *paymail.CapabilitiesPayload, defaultFromSender, defaultNote string, -) error { - // Requires a note value - if len(t.PaymailP4.Note) == 0 { - t.PaymailP4.Note = defaultNote - } - if len(t.PaymailP4.FromPaymail) == 0 { - t.PaymailP4.FromPaymail = defaultFromSender - } - - // Resolve the address information - resolution, err := resolvePaymailAddress( - ctx, cacheStore, paymailClient, capabilities, - t.PaymailP4.Alias, t.PaymailP4.Domain, - t.PaymailP4.Note, - t.PaymailP4.FromPaymail, - ) - if err != nil { - return err - } else if resolution == nil { - return ErrResolutionFailed - } - - // Set the output data - t.Scripts = append( - t.Scripts, - &ScriptOutput{ - Address: resolution.Address, - Satoshis: t.Satoshis, - Script: resolution.Output, - ScriptType: utils.ScriptTypePubKeyHash, - }, - ) - 
t.PaymailP4.ResolutionType = ResolutionTypeBasic - - return nil + return fmt.Errorf("paymail provider does not support P2P") } // processPaymailViaP2P will process the output for P2P Paymail resolution diff --git a/model_transaction_config_test.go b/model_transaction_config_test.go index 3e7d2fdc..5e1b95ae 100644 --- a/model_transaction_config_test.go +++ b/model_transaction_config_test.go @@ -167,8 +167,7 @@ func TestTransactionConfig_processOutput(t *testing.T) { err := out.processOutput( context.Background(), nil, client, - defaultSenderPaymail, defaultAddressResolutionPurpose, - true, + defaultSenderPaymail, true, ) require.Error(t, err) assert.ErrorIs(t, err, ErrOutputValueNotRecognized) @@ -184,8 +183,7 @@ func TestTransactionConfig_processOutput(t *testing.T) { err := out.processOutput( context.Background(), nil, client, - defaultSenderPaymail, defaultAddressResolutionPurpose, - true, + defaultSenderPaymail, true, ) require.Error(t, err) assert.ErrorIs(t, err, ErrPaymailAddressIsInvalid) @@ -216,19 +214,9 @@ func TestTransactionConfig_processOutput(t *testing.T) { err = out.processOutput( context.Background(), tc.Cachestore(), client, - defaultSenderPaymail, defaultAddressResolutionPurpose, - true, + defaultSenderPaymail, true, ) - require.NoError(t, err) - assert.Equal(t, satoshis, out.Satoshis) - assert.Equal(t, testAlias+"@"+testDomain, out.To) - assert.Equal(t, defaultSenderPaymail, out.PaymailP4.FromPaymail) - assert.Equal(t, testAlias, out.PaymailP4.Alias) - assert.Equal(t, testDomain, out.PaymailP4.Domain) - assert.Equal(t, defaultAddressResolutionPurpose, out.PaymailP4.Note) - assert.Equal(t, ResolutionTypeBasic, out.PaymailP4.ResolutionType) - assert.Equal(t, "", out.PaymailP4.ReferenceID) - assert.Equal(t, "", out.PaymailP4.ReceiveEndpoint) + assert.Equal(t, err.Error(), "paymail provider does not support P2P") }) t.Run("basic $handle -> paymail address resolution - valid response", func(t *testing.T) { @@ -259,19 +247,9 @@ func 
TestTransactionConfig_processOutput(t *testing.T) { err = out.processOutput( context.Background(), tc.Cachestore(), client, - defaultSenderPaymail, defaultAddressResolutionPurpose, - true, + defaultSenderPaymail, true, ) - require.NoError(t, err) - assert.Equal(t, satoshis, out.Satoshis) - assert.Equal(t, testAlias+"@"+handleDomain, out.To) - assert.Equal(t, defaultSenderPaymail, out.PaymailP4.FromPaymail) - assert.Equal(t, testAlias, out.PaymailP4.Alias) - assert.Equal(t, handleDomain, out.PaymailP4.Domain) - assert.Equal(t, defaultAddressResolutionPurpose, out.PaymailP4.Note) - assert.Equal(t, ResolutionTypeBasic, out.PaymailP4.ResolutionType) - assert.Equal(t, "", out.PaymailP4.ReferenceID) - assert.Equal(t, "", out.PaymailP4.ReceiveEndpoint) + assert.Equal(t, err.Error(), "paymail provider does not support P2P") }) t.Run("basic 1handle -> paymail address resolution - valid response", func(t *testing.T) { @@ -302,19 +280,9 @@ func TestTransactionConfig_processOutput(t *testing.T) { err = out.processOutput( context.Background(), tc.Cachestore(), client, - defaultSenderPaymail, defaultAddressResolutionPurpose, - true, + defaultSenderPaymail, true, ) - require.NoError(t, err) - assert.Equal(t, satoshis, out.Satoshis) - assert.Equal(t, testAlias+"@"+handleDomain, out.To) - assert.Equal(t, defaultSenderPaymail, out.PaymailP4.FromPaymail) - assert.Equal(t, testAlias, out.PaymailP4.Alias) - assert.Equal(t, handleDomain, out.PaymailP4.Domain) - assert.Equal(t, defaultAddressResolutionPurpose, out.PaymailP4.Note) - assert.Equal(t, ResolutionTypeBasic, out.PaymailP4.ResolutionType) - assert.Equal(t, "", out.PaymailP4.ReferenceID) - assert.Equal(t, "", out.PaymailP4.ReceiveEndpoint) + assert.Equal(t, err.Error(), "paymail provider does not support P2P") }) t.Run("p2p paymail address resolution - valid response", func(t *testing.T) { @@ -342,8 +310,7 @@ func TestTransactionConfig_processOutput(t *testing.T) { err = out.processOutput( context.Background(), tc.Cachestore(), 
client, - defaultSenderPaymail, defaultAddressResolutionPurpose, - true, + defaultSenderPaymail, true, ) require.NoError(t, err) assert.Equal(t, satoshis, out.Satoshis) diff --git a/paymail.go b/paymail.go index f3c11023..70d0e92a 100644 --- a/paymail.go +++ b/paymail.go @@ -4,11 +4,9 @@ import ( "context" "errors" "fmt" - "strings" - "time" - "github.com/bitcoin-sv/go-paymail" "github.com/mrz1836/go-cachestore" + "strings" ) // getCapabilities is a utility function to retrieve capabilities for a Paymail provider @@ -77,9 +75,6 @@ func hasP2P(capabilities *paymail.CapabilitiesPayload) (success bool, p2pDestina p2pSubmitTxURL = p2pBeefSubmitTxURL format = BeefPaymailPayloadFormat } - //else { - // format = BasicPaymailPayloadFormat - //} if len(p2pSubmitTxURL) > 0 && len(p2pDestinationURL) > 0 { success = true @@ -87,56 +82,6 @@ func hasP2P(capabilities *paymail.CapabilitiesPayload) (success bool, p2pDestina return } -// resolvePaymailAddress is an old way to resolve a Paymail address (if P2P is not supported) -// -// Deprecated: this is already deprecated by TSC, use P2P or the new P4 -func resolvePaymailAddress(ctx context.Context, cs cachestore.ClientInterface, client paymail.ClientInterface, - capabilities *paymail.CapabilitiesPayload, alias, domain, purpose, senderPaymail string, -) (*paymail.ResolutionPayload, error) { - // Attempt to get from cachestore - // todo: allow user to configure the time that they want to cache the address resolution (if they want to cache or not) - resolution := new(paymail.ResolutionPayload) - if err := cs.GetModel( - ctx, cacheKeyAddressResolution+alias+"-"+domain, resolution, - ); err != nil && !errors.Is(err, cachestore.ErrKeyNotFound) { - return nil, err - } else if resolution != nil && len(resolution.Output) > 0 { - return resolution, nil - } - - // Get the URL - addressResolutionURL := capabilities.GetString( - paymail.BRFCBasicAddressResolution, paymail.BRFCPaymentDestination, - ) - if len(addressResolutionURL) == 0 { - 
return nil, ErrMissingAddressResolutionURL - } - - // Resolve address - response, err := client.ResolveAddress( - addressResolutionURL, - alias, domain, - &paymail.SenderRequest{ - Dt: time.Now().UTC().Format(time.RFC3339), // UTC is assumed - Purpose: purpose, // Generic message about the resolution - SenderHandle: senderPaymail, // Assumed it's a paymail@domain.com - }, - ) - if err != nil { - return nil, err - } - - // Save to cachestore - if cs != nil && !cs.Engine().IsEmpty() { - _ = cs.SetModel( - ctx, cacheKeyAddressResolution+alias+"-"+domain, - &response.ResolutionPayload, cacheTTLAddressResolution, - ) - } - - return &response.ResolutionPayload, nil -} - // startP2PTransaction will start the P2P transaction, returning the reference ID and outputs func startP2PTransaction(client paymail.ClientInterface, alias, domain, p2pDestinationURL string, satoshis uint64, diff --git a/paymail_test.go b/paymail_test.go index cc878fed..9db50bcc 100644 --- a/paymail_test.go +++ b/paymail_test.go @@ -368,160 +368,3 @@ func Test_getCapabilities(t *testing.T) { assert.Equal(t, 3, len(payload.Capabilities)) }) } - -// Test_resolvePaymailAddress will test the method resolvePaymailAddress() -func Test_resolvePaymailAddress(t *testing.T) { - // t.Parallel() mocking does not allow parallel tests - - t.Run("[mocked] - valid response - no cache found", func(t *testing.T) { - client := newTestPaymailClient(t, []string{testDomain}) - - redisClient, redisConn := xtester.LoadMockRedis( - testIdleTimeout, - testMaxConnLifetime, - testMaxActiveConnections, - testMaxIdleConnections, - ) - logger := zerolog.Nop() - - tc, err := NewClient(context.Background(), - WithRedisConnection(redisClient), - WithTaskqConfig(taskmanager.DefaultTaskQConfig(testQueueName)), - WithSQLite(&datastore.SQLiteConfig{Shared: true}), - WithChainstateOptions(false, false, false, false), - WithDebugging(), - WithMinercraft(&chainstate.MinerCraftBase{}), - WithLogger(&logger), - ) - require.NoError(t, err) - 
require.NotNil(t, tc) - defer func() { - time.Sleep(1 * time.Second) - CloseClient(context.Background(), t, tc) - }() - - // Get command - getCmd := redisConn.Command(cache.GetCommand, cacheKeyCapabilities+testDomain).Expect(nil) - - // Mock all responses - mockValidResponse(http.StatusOK, false, testDomain) - - // Get capabilities - var payload *paymail.CapabilitiesPayload - payload, err = getCapabilities( - context.Background(), tc.Cachestore(), client, testDomain, - ) - require.NoError(t, err) - require.NotNil(t, payload) - assert.Equal(t, true, getCmd.Called) - - // Get command - getCmd2 := redisConn.Command(cache.GetCommand, cacheKeyAddressResolution+testAlias+"-"+testDomain).Expect(nil) - - // Resolve address - var resolvePayload *paymail.ResolutionPayload - resolvePayload, err = resolvePaymailAddress( - context.Background(), tc.Cachestore(), client, payload, - testAlias, testDomain, defaultAddressResolutionPurpose, defaultSenderPaymail, - ) - require.NoError(t, err) - require.NotNil(t, resolvePayload) - assert.Equal(t, true, getCmd2.Called) - assert.Equal(t, "1Cat862cjhp8SgLLMvin5gyk5UScasg1P9", resolvePayload.Address) - assert.Equal(t, "76a9147f11c8f67a2781df0400ebfb1f31b4c72a780b9d88ac", resolvePayload.Output) - assert.Equal(t, "", resolvePayload.Signature) - }) - - t.Run("valid response - no cache found", func(t *testing.T) { - client := newTestPaymailClient(t, []string{testDomain}) - - logger := zerolog.Nop() - tcOpts := DefaultClientOpts(true, true) - tcOpts = append(tcOpts, WithLogger(&logger)) - - tc, err := NewClient( - context.Background(), - tcOpts..., - ) - require.NoError(t, err) - require.NotNil(t, tc) - defer func() { - time.Sleep(1 * time.Second) - CloseClient(context.Background(), t, tc) - }() - - // Mock all responses - mockValidResponse(http.StatusOK, false, testDomain) - - // Get capabilities - var payload *paymail.CapabilitiesPayload - payload, err = getCapabilities( - context.Background(), tc.Cachestore(), client, testDomain, - ) - 
require.NoError(t, err) - require.NotNil(t, payload) - - // Resolve address - var resolvePayload *paymail.ResolutionPayload - resolvePayload, err = resolvePaymailAddress( - context.Background(), tc.Cachestore(), client, payload, - testAlias, testDomain, defaultAddressResolutionPurpose, defaultSenderPaymail, - ) - require.NoError(t, err) - require.NotNil(t, resolvePayload) - assert.Equal(t, "1Cat862cjhp8SgLLMvin5gyk5UScasg1P9", resolvePayload.Address) - assert.Equal(t, "76a9147f11c8f67a2781df0400ebfb1f31b4c72a780b9d88ac", resolvePayload.Output) - assert.Equal(t, "", resolvePayload.Signature) - }) - - t.Run("multiple requests for same address resolution", func(t *testing.T) { - client := newTestPaymailClient(t, []string{testDomain}) - - logger := zerolog.Nop() - tcOpts := DefaultClientOpts(true, true) - tcOpts = append(tcOpts, WithLogger(&logger)) - - tc, err := NewClient( - context.Background(), - tcOpts..., - ) - require.NoError(t, err) - require.NotNil(t, tc) - defer func() { - time.Sleep(1 * time.Second) - CloseClient(context.Background(), t, tc) - }() - - // Mock all responses - mockValidResponse(http.StatusOK, false, testDomain) - - // Get capabilities - var payload *paymail.CapabilitiesPayload - payload, err = getCapabilities( - context.Background(), tc.Cachestore(), client, testDomain, - ) - require.NoError(t, err) - require.NotNil(t, payload) - - // Resolve address - var resolvePayload *paymail.ResolutionPayload - resolvePayload, err = resolvePaymailAddress( - context.Background(), tc.Cachestore(), client, payload, - testAlias, testDomain, defaultAddressResolutionPurpose, defaultSenderPaymail, - ) - require.NoError(t, err) - require.NotNil(t, resolvePayload) - assert.Equal(t, "1Cat862cjhp8SgLLMvin5gyk5UScasg1P9", resolvePayload.Address) - - time.Sleep(1 * time.Second) - - // Resolve address - resolvePayload, err = resolvePaymailAddress( - context.Background(), tc.Cachestore(), client, payload, - testAlias, testDomain, defaultAddressResolutionPurpose, 
defaultSenderPaymail, - ) - require.NoError(t, err) - require.NotNil(t, resolvePayload) - assert.Equal(t, "1Cat862cjhp8SgLLMvin5gyk5UScasg1P9", resolvePayload.Address) - }) -} From 6a7d2bb607264884274fa3004a348ab14b2fd592 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Pawe=C5=82=20Lewandowski?= <35259896+pawellewandowski98@users.noreply.github.com> Date: Thu, 18 Jan 2024 13:26:00 +0100 Subject: [PATCH 03/13] feat(BUX-498): remove block header model (#541) --- action_blockheader.go | 116 ------------ client.go | 36 ++-- client_options.go | 9 - client_options_test.go | 47 +---- definitions.go | 39 ++-- errors.go | 3 - interface.go | 16 -- locks.go | 7 +- model_block_headers.go | 403 ----------------------------------------- models_test.go | 3 +- 10 files changed, 36 insertions(+), 643 deletions(-) delete mode 100644 action_blockheader.go delete mode 100644 model_block_headers.go diff --git a/action_blockheader.go b/action_blockheader.go deleted file mode 100644 index a4335e05..00000000 --- a/action_blockheader.go +++ /dev/null @@ -1,116 +0,0 @@ -package bux - -import ( - "context" - "fmt" - - "github.com/libsv/go-bc" - "github.com/mrz1836/go-datastore" -) - -// RecordBlockHeader will save a block header into the Datastore -// -// hash is the hash of the block header -// bh is the block header data -// opts are model options and can include "metadata" -func (c *Client) RecordBlockHeader(ctx context.Context, hash string, height uint32, bh bc.BlockHeader, - opts ...ModelOps) (*BlockHeader, error) { - - // Check for existing NewRelic transaction - ctx = c.GetOrStartTxn(ctx, "record_block_header") - - // Create the model & set the default options (gives options from client->model) - newOpts := c.DefaultModelOptions(append(opts, New())...) - blockHeader := newBlockHeader(hash, height, bh, newOpts...) 
- - // Ensure that we have a transaction id (created from the txHex) - id := blockHeader.GetID() - if len(id) == 0 { - return nil, ErrMissingBlockHeaderHash - } - - // Create the lock and set the release for after the function completes - unlock, err := newWriteLock( - ctx, fmt.Sprintf(lockKeyRecordBlockHeader, id), c.Cachestore(), - ) - defer unlock() - if err != nil { - return nil, err - } - - // Process & save the transaction model - if err = blockHeader.Save(ctx); err != nil { - return nil, err - } - - // Return the response - return blockHeader, nil -} - -// GetBlockHeaders will get all the block headers from the Datastore -func (c *Client) GetBlockHeaders(ctx context.Context, metadataConditions *Metadata, - conditions *map[string]interface{}, queryParams *datastore.QueryParams, opts ...ModelOps) ([]*BlockHeader, error) { - - // Check for existing NewRelic transaction - ctx = c.GetOrStartTxn(ctx, "get_block_headers") - - // Get the block headers - blockHeaders, err := getBlockHeaders( - ctx, metadataConditions, conditions, queryParams, - c.DefaultModelOptions(opts...)..., - ) - if err != nil { - return nil, err - } - - return blockHeaders, nil -} - -// GetBlockHeadersCount will get a count of all the block headers from the Datastore -func (c *Client) GetBlockHeadersCount(ctx context.Context, metadataConditions *Metadata, - conditions *map[string]interface{}, opts ...ModelOps) (int64, error) { - - // Check for existing NewRelic transaction - ctx = c.GetOrStartTxn(ctx, "get_block_headers_count") - - // Get the block headers count - count, err := getBlockHeadersCount( - ctx, metadataConditions, conditions, - c.DefaultModelOptions(opts...)..., - ) - if err != nil { - return 0, err - } - - return count, nil -} - -// GetUnsyncedBlockHeaders get all unsynced block headers -func (c *Client) GetUnsyncedBlockHeaders(ctx context.Context) ([]*BlockHeader, error) { - - // Check for existing NewRelic transaction - ctx = c.GetOrStartTxn(ctx, "get_unsynced_block_headers") - - 
// Get the unsynced block headers - return getUnsyncedBlockHeaders(ctx, c.DefaultModelOptions()...) -} - -// GetLastBlockHeader get last block header -func (c *Client) GetLastBlockHeader(ctx context.Context) (*BlockHeader, error) { - - // Check for existing NewRelic transaction - ctx = c.GetOrStartTxn(ctx, "get_last_block_header") - - // Get the last block header - return getLastBlockHeader(ctx, c.DefaultModelOptions()...) -} - -// GetBlockHeaderByHeight get the block header by height -func (c *Client) GetBlockHeaderByHeight(ctx context.Context, height uint32) (*BlockHeader, error) { - - // Check for existing NewRelic transaction - ctx = c.GetOrStartTxn(ctx, "get_block_header_by_height") - - // Get the block header by height - return getBlockHeaderByHeight(ctx, height, c.DefaultModelOptions()...) -} diff --git a/client.go b/client.go index a031b2cf..89458213 100644 --- a/client.go +++ b/client.go @@ -26,22 +26,21 @@ type ( // clientOptions holds all the configuration for the client clientOptions struct { - cacheStore *cacheStoreOptions // Configuration options for Cachestore (ristretto, redis, etc.) - cluster *clusterOptions // Configuration options for the cluster coordinator - chainstate *chainstateOptions // Configuration options for Chainstate (broadcast, sync, etc.) - dataStore *dataStoreOptions // Configuration options for the DataStore (MySQL, etc.) - debug bool // If the client is in debug mode - encryptionKey string // Encryption key for encrypting sensitive information (IE: paymail xPub) (hex encoded key) - httpClient HTTPInterface // HTTP interface to use - importBlockHeadersURL string // The URL of the block headers zip file to import old block headers on startup. 
if block 0 is found in the DB, block headers will mpt be downloaded - iuc bool // (Input UTXO Check) True will check input utxos when saving transactions - logger *zerolog.Logger // Internal logging - models *modelOptions // Configuration options for the loaded models - newRelic *newRelicOptions // Configuration options for NewRelic - notifications *notificationsOptions // Configuration options for Notifications - paymail *paymailOptions // Paymail options & client - taskManager *taskManagerOptions // Configuration options for the TaskManager (TaskQ, etc.) - userAgent string // User agent for all outgoing requests + cacheStore *cacheStoreOptions // Configuration options for Cachestore (ristretto, redis, etc.) + cluster *clusterOptions // Configuration options for the cluster coordinator + chainstate *chainstateOptions // Configuration options for Chainstate (broadcast, sync, etc.) + dataStore *dataStoreOptions // Configuration options for the DataStore (MySQL, etc.) + debug bool // If the client is in debug mode + encryptionKey string // Encryption key for encrypting sensitive information (IE: paymail xPub) (hex encoded key) + httpClient HTTPInterface // HTTP interface to use + iuc bool // (Input UTXO Check) True will check input utxos when saving transactions + logger *zerolog.Logger // Internal logging + models *modelOptions // Configuration options for the loaded models + newRelic *newRelicOptions // Configuration options for NewRelic + notifications *notificationsOptions // Configuration options for Notifications + paymail *paymailOptions // Paymail options & client + taskManager *taskManagerOptions // Configuration options for the TaskManager (TaskQ, etc.) 
+ userAgent string // User agent for all outgoing requests } // chainstateOptions holds the chainstate configuration and client @@ -357,11 +356,6 @@ func (c *Client) HTTPClient() HTTPInterface { return c.options.httpClient } -// ImportBlockHeadersFromURL will the URL where to import block headers from -func (c *Client) ImportBlockHeadersFromURL() string { - return c.options.importBlockHeadersURL -} - // IsDebug will return the debug flag (bool) func (c *Client) IsDebug() bool { return c.options.debug diff --git a/client_options.go b/client_options.go index 86b97bd1..67562eaf 100644 --- a/client_options.go +++ b/client_options.go @@ -243,15 +243,6 @@ func WithIUCDisabled() ClientOps { } } -// WithImportBlockHeaders will import block headers on startup -func WithImportBlockHeaders(importBlockHeadersURL string) ClientOps { - return func(c *clientOptions) { - if len(importBlockHeadersURL) > 0 { - c.importBlockHeadersURL = importBlockHeadersURL - } - } -} - // WithHTTPClient will set the custom http interface func WithHTTPClient(httpClient HTTPInterface) ClientOps { return func(c *clientOptions) { diff --git a/client_options_test.go b/client_options_test.go index fc359e7e..107af5c4 100644 --- a/client_options_test.go +++ b/client_options_test.go @@ -583,8 +583,7 @@ func TestWithModels(t *testing.T) { assert.Equal(t, []string{ ModelXPub.String(), ModelAccessKey.String(), - ModelDraftTransaction.String(), - ModelTransaction.String(), ModelBlockHeader.String(), + ModelDraftTransaction.String(), ModelTransaction.String(), ModelSyncTransaction.String(), ModelDestination.String(), ModelUtxo.String(), }, tc.GetModelNames()) @@ -602,8 +601,7 @@ func TestWithModels(t *testing.T) { assert.Equal(t, []string{ ModelXPub.String(), ModelAccessKey.String(), - ModelDraftTransaction.String(), - ModelTransaction.String(), ModelBlockHeader.String(), + ModelDraftTransaction.String(), ModelTransaction.String(), ModelSyncTransaction.String(), ModelDestination.String(), ModelUtxo.String(), 
ModelPaymailAddress.String(), }, tc.GetModelNames()) @@ -646,45 +644,6 @@ func TestWithIUCDisabled(t *testing.T) { }) } -// TestWithImportBlockHeaders will test the method WithImportBlockHeaders() -func TestWithImportBlockHeaders(t *testing.T) { - t.Parallel() - testLogger := zerolog.Nop() - - t.Run("check type", func(t *testing.T) { - opt := WithImportBlockHeaders("") - assert.IsType(t, *new(ClientOps), opt) - }) - - t.Run("empty url", func(t *testing.T) { - opts := DefaultClientOpts(false, true) - opts = append(opts, WithImportBlockHeaders("")) - opts = append(opts, WithLogger(&testLogger)) - - tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) - require.NoError(t, err) - require.NotNil(t, tc) - defer CloseClient(context.Background(), t, tc) - - assert.Equal(t, "", tc.ImportBlockHeadersFromURL()) - }) - - t.Run("custom import url", func(t *testing.T) { - customURL := "https://domain.com/import.txt" - - opts := DefaultClientOpts(false, true) - opts = append(opts, WithImportBlockHeaders(customURL)) - opts = append(opts, WithLogger(&testLogger)) - - tc, err := NewClient(tester.GetNewRelicCtx(t, defaultNewRelicApp, defaultNewRelicTx), opts...) 
- require.NoError(t, err) - require.NotNil(t, tc) - defer CloseClient(context.Background(), t, tc) - - assert.Equal(t, customURL, tc.ImportBlockHeadersFromURL()) - }) -} - // TestWithHTTPClient will test the method WithHTTPClient() func TestWithHTTPClient(t *testing.T) { t.Parallel() @@ -835,7 +794,6 @@ func TestWithAutoMigrate(t *testing.T) { ModelAccessKey.String(), ModelDraftTransaction.String(), ModelTransaction.String(), - ModelBlockHeader.String(), ModelSyncTransaction.String(), ModelDestination.String(), ModelUtxo.String(), @@ -857,7 +815,6 @@ func TestWithAutoMigrate(t *testing.T) { ModelAccessKey.String(), ModelDraftTransaction.String(), ModelTransaction.String(), - ModelBlockHeader.String(), ModelSyncTransaction.String(), ModelDestination.String(), ModelUtxo.String(), diff --git a/definitions.go b/definitions.go index f23b6bc8..2972f487 100644 --- a/definitions.go +++ b/definitions.go @@ -6,28 +6,26 @@ import ( // Defaults for engine functionality const ( - changeOutputSize = uint64(35) // Average size in bytes of a change output - databaseLongReadTimeout = 30 * time.Second // For all "GET" or "SELECT" methods - defaultBroadcastTimeout = 25 * time.Second // Default timeout for broadcasting - defaultCacheLockTTL = 20 // in Seconds - defaultCacheLockTTW = 10 // in Seconds - defaultDatabaseReadTimeout = 20 * time.Second // For all "GET" or "SELECT" methods - defaultDraftTxExpiresIn = 20 * time.Second // Default TTL for draft transactions - defaultHTTPTimeout = 20 * time.Second // Default timeout for HTTP requests - defaultOverheadSize = uint64(8) // 8 bytes is the default overhead in a transaction = 4 bytes version + 4 bytes nLockTime - defaultQueryTxTimeout = 10 * time.Second // Default timeout for syncing on-chain information - defaultSleepForNewBlockHeaders = 30 * time.Second // Default wait before checking for a new unprocessed block - defaultUserAgent = "bux: " + version // Default user agent - dustLimit = uint64(1) // Dust limit - mongoTestVersion = 
"6.0.4" // Mongo Testing Version - sqliteTestVersion = "3.37.0" // SQLite Testing Version (dummy version for now) - version = "v0.13.0" // bux version + changeOutputSize = uint64(35) // Average size in bytes of a change output + databaseLongReadTimeout = 30 * time.Second // For all "GET" or "SELECT" methods + defaultBroadcastTimeout = 25 * time.Second // Default timeout for broadcasting + defaultCacheLockTTL = 20 // in Seconds + defaultCacheLockTTW = 10 // in Seconds + defaultDatabaseReadTimeout = 20 * time.Second // For all "GET" or "SELECT" methods + defaultDraftTxExpiresIn = 20 * time.Second // Default TTL for draft transactions + defaultHTTPTimeout = 20 * time.Second // Default timeout for HTTP requests + defaultOverheadSize = uint64(8) // 8 bytes is the default overhead in a transaction = 4 bytes version + 4 bytes nLockTime + defaultQueryTxTimeout = 10 * time.Second // Default timeout for syncing on-chain information + defaultUserAgent = "bux: " + version // Default user agent + dustLimit = uint64(1) // Dust limit + mongoTestVersion = "6.0.4" // Mongo Testing Version + sqliteTestVersion = "3.37.0" // SQLite Testing Version (dummy version for now) + version = "v0.13.0" // bux version ) // All the base models const ( ModelAccessKey ModelName = "access_key" - ModelBlockHeader ModelName = "block_header" ModelDestination ModelName = "destination" ModelDraftTransaction ModelName = "draft_transaction" ModelMetadata ModelName = "metadata" @@ -42,7 +40,6 @@ const ( // AllModelNames is a list of all models var AllModelNames = []ModelName{ ModelAccessKey, - ModelBlockHeader, ModelDestination, ModelMetadata, ModelPaymailAddress, @@ -56,7 +53,6 @@ var AllModelNames = []ModelName{ // Internal table names const ( tableAccessKeys = "access_keys" - tableBlockHeaders = "block_headers" tableDestinations = "destinations" tableDraftTransactions = "draft_transactions" tablePaymailAddresses = "paymail_addresses" @@ -152,11 +148,6 @@ var BaseModels = []interface{}{ Model: 
*NewBaseModel(ModelTransaction), }, - // Block Headers as received by the BitCoin network - &BlockHeader{ - Model: *NewBaseModel(ModelBlockHeader), - }, - // Sync configuration for transactions (on-chain) (related to Transaction) &SyncTransaction{ Model: *NewBaseModel(ModelSyncTransaction), diff --git a/errors.go b/errors.go index 17562034..60f1be78 100644 --- a/errors.go +++ b/errors.go @@ -106,9 +106,6 @@ var ErrDraftIDMismatch = errors.New("transaction draft id does not match utxo dr // ErrMissingTxHex is when the hex is missing or invalid and creates an empty id var ErrMissingTxHex = errors.New("transaction hex is empty or id is missing") -// ErrMissingBlockHeaderHash is when the hash is missing or invalid and creates an empty id -var ErrMissingBlockHeaderHash = errors.New("block header hash is empty or id is missing") - // ErrUtxoAlreadySpent is when the utxo is already spent, but is trying to be used var ErrUtxoAlreadySpent = errors.New("utxo has already been spent") diff --git a/interface.go b/interface.go index 19caeed5..cfd8905b 100644 --- a/interface.go +++ b/interface.go @@ -9,7 +9,6 @@ import ( "github.com/BuxOrg/bux/notifications" "github.com/BuxOrg/bux/taskmanager" "github.com/bitcoin-sv/go-paymail" - "github.com/libsv/go-bc" "github.com/mrz1836/go-cachestore" "github.com/mrz1836/go-datastore" "github.com/rs/zerolog" @@ -43,19 +42,6 @@ type AdminService interface { conditions *map[string]interface{}, opts ...ModelOps) (int64, error) } -// BlockHeaderService is the block header actions -type BlockHeaderService interface { - GetBlockHeaderByHeight(ctx context.Context, height uint32) (*BlockHeader, error) - GetBlockHeaders(ctx context.Context, metadata *Metadata, conditions *map[string]interface{}, - queryParams *datastore.QueryParams, opts ...ModelOps) ([]*BlockHeader, error) - GetBlockHeadersCount(ctx context.Context, metadata *Metadata, conditions *map[string]interface{}, - opts ...ModelOps) (int64, error) - GetLastBlockHeader(ctx context.Context) 
(*BlockHeader, error) - GetUnsyncedBlockHeaders(ctx context.Context) ([]*BlockHeader, error) - RecordBlockHeader(ctx context.Context, hash string, height uint32, bh bc.BlockHeader, - opts ...ModelOps) (*BlockHeader, error) -} - // ClientService is the client related services type ClientService interface { Cachestore() cachestore.ClientInterface @@ -175,7 +161,6 @@ type XPubService interface { type ClientInterface interface { AccessKeyService AdminService - BlockHeaderService ClientService DestinationService DraftTransactionService @@ -191,7 +176,6 @@ type ClientInterface interface { DefaultSyncConfig() *SyncConfig EnableNewRelic() GetOrStartTxn(ctx context.Context, name string) context.Context - ImportBlockHeadersFromURL() string IsDebug() bool IsEncryptionKeySet() bool IsIUCEnabled() bool diff --git a/locks.go b/locks.go index 783d4d05..f4b48aeb 100644 --- a/locks.go +++ b/locks.go @@ -10,10 +10,9 @@ const ( lockKeyProcessBroadcastTx = "process-broadcast-transaction-%s" // + Tx ID lockKeyProcessP2PTx = "process-p2p-transaction-%s" // + Tx ID lockKeyProcessSyncTx = "process-sync-transaction-task" - lockKeyProcessXpub = "action-xpub-id-%s" // + Xpub ID - lockKeyRecordBlockHeader = "action-record-block-header-%s" // + Hash id - lockKeyRecordTx = "action-record-transaction-%s" // + Tx ID - lockKeyReserveUtxo = "utxo-reserve-xpub-id-%s" // + Xpub ID + lockKeyProcessXpub = "action-xpub-id-%s" // + Xpub ID + lockKeyRecordTx = "action-record-transaction-%s" // + Tx ID + lockKeyReserveUtxo = "utxo-reserve-xpub-id-%s" // + Xpub ID ) // newWriteLock will take care of creating a lock and defer diff --git a/model_block_headers.go b/model_block_headers.go deleted file mode 100644 index c93b579c..00000000 --- a/model_block_headers.go +++ /dev/null @@ -1,403 +0,0 @@ -package bux - -import ( - "context" - "database/sql" - "encoding/csv" - "encoding/hex" - "errors" - "io" - "io/ioutil" - "os" - "strconv" - "time" - - "github.com/BuxOrg/bux/utils" - "github.com/libsv/go-bc" - 
"github.com/mrz1836/go-datastore" - customTypes "github.com/mrz1836/go-datastore/custom_types" -) - -// BlockHeader is an object representing the BitCoin block header -// -// Gorm related models & indexes: https://gorm.io/docs/models.html - https://gorm.io/docs/indexes.html -type BlockHeader struct { - // Base model - Model `bson:",inline"` - - // Model specific fields - ID string `json:"id" toml:"id" yaml:"id" gorm:"<-:create;type:char(64);primaryKey;comment:This is the block hash" bson:"_id"` - Height uint32 `json:"height" toml:"height" yaml:"height" gorm:"<-create;uniqueIndex;comment:This is the block height" bson:"height"` - Time uint32 `json:"time" toml:"time" yaml:"time" gorm:"<-create;index;comment:This is the time the block was mined" bson:"time"` - Nonce uint32 `json:"nonce" toml:"nonce" yaml:"nonce" gorm:"<-create;comment:This is the nonce" bson:"nonce"` - Version uint32 `json:"version" toml:"version" yaml:"version" gorm:"<-create;comment:This is the version" bson:"version"` - HashPreviousBlock string `json:"hash_previous_block" toml:"hash_previous_block" yaml:"hash_previous_block" gorm:"<-:create;type:char(64);index;comment:This is the hash of the previous block" bson:"hash_previous_block"` - HashMerkleRoot string `json:"hash_merkle_root" toml:"hash_merkle_root" yaml:"hash_merkle_root" gorm:"<-;type:char(64);index;comment:This is the hash of the merkle root" bson:"hash_merkle_root"` - Bits string `json:"bits" toml:"bits" yaml:"bits" gorm:"<-:create;comment:This is the block difficulty" bson:"bits"` - Synced customTypes.NullTime `json:"synced" toml:"synced" yaml:"synced" gorm:"type:timestamp;index;comment:This is when the block was last synced to the bux server" bson:"synced,omitempty"` -} - -// newBlockHeader will start a new block header model -func newBlockHeader(hash string, height uint32, blockHeader bc.BlockHeader, opts ...ModelOps) (bh *BlockHeader) { - - // Create a new model - bh = &BlockHeader{ - ID: hash, - Height: height, - Model: 
*NewBaseModel(ModelBlockHeader, opts...), - } - - // Set header info - bh.setHeaderInfo(blockHeader) - return -} - -// GetModelName will get the name of the current model -func (m *BlockHeader) GetModelName() string { - return ModelBlockHeader.String() -} - -// GetModelTableName will get the db table name of the current model -func (m *BlockHeader) GetModelTableName() string { - return tableBlockHeaders -} - -// getBlockHeaders will get all the block headers with the given conditions -func getBlockHeaders(ctx context.Context, metadata *Metadata, conditions *map[string]interface{}, - queryParams *datastore.QueryParams, opts ...ModelOps) ([]*BlockHeader, error) { - - modelItems := make([]*BlockHeader, 0) - if err := getModelsByConditions(ctx, ModelBlockHeader, &modelItems, metadata, conditions, queryParams, opts...); err != nil { - return nil, err - } - - return modelItems, nil -} - -// getBlockHeadersCount will get a count of all the block headers with the given conditions -func getBlockHeadersCount(ctx context.Context, metadata *Metadata, conditions *map[string]interface{}, - opts ...ModelOps) (int64, error) { - - return getModelCountByConditions(ctx, ModelBlockHeader, BlockHeader{}, metadata, conditions, opts...) -} - -// getUnsyncedBlockHeaders will return all block headers that have not been marked as synced -func getUnsyncedBlockHeaders(ctx context.Context, opts ...ModelOps) ([]*BlockHeader, error) { - - // Construct an empty model - var models []BlockHeader - conditions := map[string]interface{}{ - "synced": nil, - } - - // Get the records - if err := getModels( - ctx, NewBaseModel(ModelBlockHeader, opts...).Client().Datastore(), - &models, conditions, nil, defaultDatabaseReadTimeout, - ); err != nil { - if errors.Is(err, datastore.ErrNoResults) { - return nil, nil - } - return nil, err - } - - // Loop and enrich - blockHeaders := make([]*BlockHeader, 0) - for index := range models { - models[index].enrich(ModelBlockHeader, opts...) 
- blockHeaders = append(blockHeaders, &models[index]) - } - - return blockHeaders, nil -} - -// getLastBlockHeader will return the last block header in the database -func getLastBlockHeader(ctx context.Context, opts ...ModelOps) (*BlockHeader, error) { - - // Construct an empty model - var model []BlockHeader - - queryParams := &datastore.QueryParams{ - Page: 1, - PageSize: 1, - OrderByField: "height", - SortDirection: "desc", - } - - // Get the records - if err := getModels( - ctx, NewBaseModel(ModelBlockHeader, opts...).Client().Datastore(), - &model, nil, queryParams, defaultDatabaseReadTimeout, - ); err != nil { - if errors.Is(err, datastore.ErrNoResults) { - return nil, nil - } - return nil, err - } - - if len(model) == 1 { - blockHeader := model[0] - blockHeader.enrich(ModelBlockHeader, opts...) - return &blockHeader, nil - } - - return nil, nil -} - -// Save will save the model into the Datastore -func (m *BlockHeader) Save(ctx context.Context) (err error) { - return Save(ctx, m) -} - -// GetHash will get the hash of the block header -func (m *BlockHeader) GetHash() string { - return m.ID -} - -// setHeaderInfo will set the block header info from a bc.BlockHeader -func (m *BlockHeader) setHeaderInfo(bh bc.BlockHeader) { - m.Bits = hex.EncodeToString(bh.Bits) - m.HashMerkleRoot = hex.EncodeToString(bh.HashMerkleRoot) - m.HashPreviousBlock = hex.EncodeToString(bh.HashPrevBlock) - m.Nonce = bh.Nonce - m.Time = bh.Time - m.Version = bh.Version -} - -// GetID will return the id of the field (hash) -func (m *BlockHeader) GetID() string { - return m.ID -} - -// getBlockHeaderByHeight will get the block header given by height -func getBlockHeaderByHeight(ctx context.Context, height uint32, opts ...ModelOps) (*BlockHeader, error) { - - // Construct an empty model - blockHeader := &BlockHeader{ - Model: *NewBaseModel(ModelDestination, opts...), - } - - conditions := map[string]interface{}{ - "height": height, - } - - // Get the record - if err := Get(ctx, blockHeader, 
conditions, true, defaultDatabaseReadTimeout, false); err != nil { - if errors.Is(err, datastore.ErrNoResults) { - return nil, nil - } - return nil, err - } - - return blockHeader, nil -} - -// BeforeCreating will fire before the model is being inserted into the Datastore -func (m *BlockHeader) BeforeCreating(_ context.Context) error { - - m.Client().Logger().Debug(). - Str("blockHeaderID", m.ID). - Msgf("starting: %s BeforeCreating hook...", m.Name()) - - // Test for required field(s) - if len(m.ID) == 0 { - return ErrMissingFieldHash - } - - m.Client().Logger().Debug(). - Str("blockHeaderID", m.ID). - Msgf("end: %s BeforeCreating hook", m.Name()) - return nil -} - -// AfterCreated will fire after the model is created in the Datastore -func (m *BlockHeader) AfterCreated(_ context.Context) error { - m.Client().Logger().Debug(). - Str("blockHeaderID", m.ID). - Msgf("starting: %s AfterCreated hook", m.Name()) - - m.Client().Logger().Debug(). - Str("blockHeaderID", m.ID). - Msgf("end: AfterCreated %d hook", m.Height) - return nil -} - -// Display filter the model for display -func (m *BlockHeader) Display() interface{} { - return m -} - -// Migrate model specific migration on startup -func (m *BlockHeader) Migrate(client datastore.ClientInterface) error { - // import all previous block headers from file - blockHeadersFile := m.Client().ImportBlockHeadersFromURL() - if blockHeadersFile != "" { - ctx := context.Background() - // check whether we have block header 0, then we do not import - blockHeader0, err := getBlockHeaderByHeight(ctx, 0, m.Client().DefaultModelOptions()...) 
- if err != nil { - // stop execution if block headers import is not successful - // the block headers state can be messed up if they are not imported, or half imported - panic(err.Error()) - } - if blockHeader0 == nil { - // import block headers in the background - m.Client().Logger().Info().Msg("Importing block headers into database") - err = m.importBlockHeaders(ctx, client, blockHeadersFile) - if err != nil { - // stop execution if block headers import is not successful - // the block headers state can be messed up if they are not imported, or half imported - panic(err.Error()) - } - m.Client().Logger().Info().Msg("Successfully imported all block headers into database") - } - } - - return nil -} - -// importBlockHeaders will import the block headers from a file -func (m *BlockHeader) importBlockHeaders(ctx context.Context, client datastore.ClientInterface, - blockHeadersFile string) error { - - file, err := ioutil.TempFile("", "blocks_bux.tsv") - if err != nil { - return err - } - defer func() { - if err = os.Remove(file.Name()); err != nil { - m.Client().Logger().Error().Msg(err.Error()) - } - }() - - if err = utils.DownloadAndUnzipFile( - ctx, m.Client().HTTPClient(), file, blockHeadersFile, - ); err != nil { - return err - } - - blockFile := file.Name() - - /* local file import - var err error - pwd, _ := os.Getwd() - blockFile := pwd + "/blocks/blocks_bux.tsv" - */ - - batchSize := 1000 - if m.Client().Datastore().Engine() == datastore.MongoDB { - batchSize = 10000 - } - models := make([]*BlockHeader, 0) - count := 0 - readModel := func(model *BlockHeader) error { - count++ - - models = append(models, model) - - if count%batchSize == 0 { - // insert in batches of batchSize - if err = client.CreateInBatches(ctx, models, batchSize); err != nil { - return err - } - // reset models - models = make([]*BlockHeader, 0) - } - return nil - } - - // accumulate the models into a slice - if err = m.importCSVFile(ctx, blockFile, readModel); errors.Is(err, io.EOF) { - if 
count%batchSize != 0 { - // remaining batch - return client.CreateInBatches(ctx, models, batchSize) - } - return nil - } - return err -} - -// importCSVFile will import the block headers from a given CSV file -func (m *BlockHeader) importCSVFile(_ context.Context, blockFile string, - readModel func(model *BlockHeader) error) error { - - CSVFile, err := os.Open(blockFile) //nolint:gosec // file only added by administrator via config - if err != nil { - return err - } - defer func() { - if err = CSVFile.Close(); err != nil { - m.Client().Logger().Error().Msg(err.Error()) - } - }() - - reader := csv.NewReader(CSVFile) - reader.Comma = '\t' // It's a tab-delimited file - reader.FieldsPerRecord = 0 // -1 is variable #, 0 is [0]th line's # - reader.LazyQuotes = true // Some fields are like \t"F" ST.\t - reader.TrimLeadingSpace = false // Keep the fields' whitespace how it is - - // read first line - HEADER - if _, err = reader.Read(); err != nil { - return err - } - - // Read all rows - for { - var row []string - if row, err = reader.Read(); err != nil { - return err - } - - var parsedInt uint64 - if parsedInt, err = strconv.ParseUint(row[1], 10, 32); err != nil { - return err - } - - height := uint32(parsedInt) - - if parsedInt, err = strconv.ParseUint(row[3], 10, 32); err != nil { - return err - } - - nonce := uint32(parsedInt) - - if parsedInt, err = strconv.ParseUint(row[4], 10, 32); err != nil { - return err - } - ver := uint32(parsedInt) - if parsedInt, err = strconv.ParseUint(row[7], 10, 32); err != nil { - return err - } - bits := parsedInt - - var timeField time.Time - if timeField, err = time.Parse("2006-01-02 15:04:05", row[2]); err != nil { - return err - } - - var syncedTime time.Time - if syncedTime, err = time.Parse("2006-01-02 15:04:05", row[8]); err != nil { - return err - } - - // todo: use a function like newBlockHeader? 
vs making a struct - model := &BlockHeader{ - Bits: strconv.FormatUint(bits, 16), - HashMerkleRoot: row[6], - HashPreviousBlock: row[5], - Height: height, - ID: row[0], - Nonce: nonce, - Synced: customTypes.NullTime{NullTime: sql.NullTime{Valid: true, Time: syncedTime}}, - Time: uint32(timeField.Unix()), - Version: ver, - } - model.Model.CreatedAt = time.Now() - - // call the readModel callback function to add the model to the database - if err = readModel(model); err != nil { - return err - } - } -} diff --git a/models_test.go b/models_test.go index 694f2ef3..35fecba2 100644 --- a/models_test.go +++ b/models_test.go @@ -20,7 +20,6 @@ func TestModelName_String(t *testing.T) { t.Parallel() t.Run("all model names", func(t *testing.T) { - assert.Equal(t, "block_header", ModelBlockHeader.String()) assert.Equal(t, "destination", ModelDestination.String()) assert.Equal(t, "empty", ModelNameEmpty.String()) assert.Equal(t, "metadata", ModelMetadata.String()) @@ -30,7 +29,7 @@ func TestModelName_String(t *testing.T) { assert.Equal(t, "transaction", ModelTransaction.String()) assert.Equal(t, "utxo", ModelUtxo.String()) assert.Equal(t, "xpub", ModelXPub.String()) - assert.Len(t, AllModelNames, 10) + assert.Len(t, AllModelNames, 9) }) } From 1f914f8184c4cbadf434651e13098a6a86b28244 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 10:42:42 +0100 Subject: [PATCH 04/13] feat: rebase fixtures --- examples/client/custom_cron/custom_cron.go | 4 +- model_incoming_transactions.go | 349 --------------------- 2 files changed, 2 insertions(+), 351 deletions(-) delete mode 100644 model_incoming_transactions.go diff --git a/examples/client/custom_cron/custom_cron.go b/examples/client/custom_cron/custom_cron.go index fed09089..922faff2 100644 --- a/examples/client/custom_cron/custom_cron.go +++ b/examples/client/custom_cron/custom_cron.go @@ -11,8 +11,8 @@ import ( func main() { client, err := bux.NewClient( context.Background(), // Set context - 
bux.WithCronCustmPeriod(bux.CronJobNameDraftTransactionCleanUp, 2*time.Second), - bux.WithCronCustmPeriod(bux.CronJobNameSyncTransactionSync, 4*time.Second), + bux.WithCronCustomPeriod(bux.CronJobNameDraftTransactionCleanUp, 2*time.Second), + bux.WithCronCustomPeriod(bux.CronJobNameSyncTransactionSync, 4*time.Second), ) if err != nil { log.Fatalln("error: " + err.Error()) diff --git a/model_incoming_transactions.go b/model_incoming_transactions.go deleted file mode 100644 index ab81b763..00000000 --- a/model_incoming_transactions.go +++ /dev/null @@ -1,349 +0,0 @@ -package bux - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "time" - - "github.com/BuxOrg/bux/chainstate" - "github.com/libsv/go-bt/v2" - "github.com/mrz1836/go-datastore" - "github.com/rs/zerolog" -) - -// IncomingTransaction is an object representing the incoming (external) transaction (for pre-processing) -// -// Gorm related models & indexes: https://gorm.io/docs/models.html - https://gorm.io/docs/indexes.html -type IncomingTransaction struct { - // Base model - Model `bson:",inline"` - - // Standard transaction model base fields - TransactionBase `bson:",inline"` - - // Model specific fields - Status SyncStatus `json:"status" toml:"status" yaml:"status" gorm:"<-;type:varchar(10);index;comment:This is the status of processing the transaction" bson:"status"` - StatusMessage string `json:"status_message" toml:"status_message" yaml:"status_message" gorm:"<-;type:varchar(512);comment:This is the status message or error" bson:"status_message"` -} - -func emptyIncomingTx(opts ...ModelOps) *IncomingTransaction { - return &IncomingTransaction{ - Model: *NewBaseModel(ModelIncomingTransaction, opts...), - TransactionBase: TransactionBase{}, - Status: SyncStatusReady, - } -} - -// newIncomingTransaction will start a new model -func newIncomingTransaction(hex string, opts ...ModelOps) (*IncomingTransaction, error) { - var btTx *bt.Tx - var err error - - if btTx, err = bt.NewTxFromString(hex); err 
!= nil { - return nil, err - } - - tx := emptyIncomingTx(opts...) - tx.ID = btTx.TxID() - tx.Hex = hex - tx.parsedTx = btTx - - return tx, nil -} - -// getIncomingTransactionsToProcess will get the incoming transactions to process -func getIncomingTransactionsToProcess(ctx context.Context, queryParams *datastore.QueryParams, - opts ...ModelOps, -) ([]*IncomingTransaction, error) { - // Construct an empty model - var models []IncomingTransaction - conditions := map[string]interface{}{ - statusField: statusReady, - } - - if queryParams == nil { - queryParams = &datastore.QueryParams{ - Page: 0, - PageSize: 0, - } - } - queryParams.OrderByField = idField - queryParams.SortDirection = datastore.SortAsc - - // Get the record - if err := getModels( - ctx, NewBaseModel(ModelNameEmpty, opts...).Client().Datastore(), - &models, conditions, queryParams, defaultDatabaseReadTimeout, - ); err != nil { - if errors.Is(err, datastore.ErrNoResults) { - return nil, nil - } - return nil, err - } - - // Loop and enrich - txs := make([]*IncomingTransaction, 0) - for index := range models { - models[index].enrich(ModelIncomingTransaction, opts...) 
- txs = append(txs, &models[index]) - } - - return txs, nil -} - -// GetModelName will get the name of the current model -func (m *IncomingTransaction) GetModelName() string { - return ModelIncomingTransaction.String() -} - -// GetModelTableName will get the db table name of the current model -func (m *IncomingTransaction) GetModelTableName() string { - return tableIncomingTransactions -} - -// Save will save the model into the Datastore -func (m *IncomingTransaction) Save(ctx context.Context) error { - return Save(ctx, m) -} - -// GetID will get the ID -func (m *IncomingTransaction) GetID() string { - return m.ID -} - -func (m *IncomingTransaction) toTransactionDto() *Transaction { - t := Transaction{} - t.Hex = m.Hex - - t.parsedTx = m.parsedTx - t.rawXpubKey = m.rawXpubKey - t.setXPubID() - t.setID() //nolint:errcheck,gosec // error is not needed - - t.Metadata = m.Metadata - t.NumberOfOutputs = uint32(len(m.parsedTx.Outputs)) - t.NumberOfInputs = uint32(len(m.parsedTx.Inputs)) - - return &t -} - -// BeforeCreating will fire before the model is being inserted into the Datastore -func (m *IncomingTransaction) BeforeCreating(ctx context.Context) error { - m.Client().Logger().Debug(). - Str("txID", m.GetID()). - Msgf("starting: %s BeforeCreating hook...", m.Name()) - - // Set status - m.Status = SyncStatusReady - - // Make sure ID is valid - if len(m.ID) == 0 { - return ErrMissingFieldID - } - if len(m.Hex) == 0 { - return ErrMissingFieldHex - } - - // Attempt to parse - if len(m.Hex) > 0 && m.TransactionBase.parsedTx == nil { - m.TransactionBase.parsedTx, _ = bt.NewTxFromString(m.Hex) - } - - // Require the tx to be parsed - if m.TransactionBase.parsedTx == nil { - return ErrTransactionNotParsed - } - - // Check that the transaction has >= 1 known destination - if !m.TransactionBase.hasOneKnownDestination(ctx, m.Client()) { - return ErrNoMatchingOutputs - } - - m.Client().Logger().Debug(). - Str("txID", m.GetID()). 
- Msgf("end: %s BeforeCreating hook", m.Name()) - return nil -} - -// AfterCreated will fire after the model is created -func (m *IncomingTransaction) AfterCreated(_ context.Context) error { - m.Client().Logger().Debug(). - Str("txID", m.GetID()). - Msgf("starting: %s AfterCreated hook...", m.Name()) - - // todo: this should be refactored into a task - if err := processIncomingTransaction(context.Background(), m.Client().Logger(), m); err != nil { - m.Client().Logger().Error(). - Str("txID", m.GetID()). - Msgf("error processing incoming transaction: %v", err.Error()) - } - - m.Client().Logger().Debug(). - Str("txID", m.GetID()). - Msgf("end: %s AfterCreated hook", m.Name()) - return nil -} - -// Migrate model specific migration on startup -func (m *IncomingTransaction) Migrate(client datastore.ClientInterface) error { - return client.IndexMetadata(client.GetTableName(tableIncomingTransactions), metadataField) -} - -// processIncomingTransactions will process incoming transaction records -func processIncomingTransactions(ctx context.Context, logClient *zerolog.Logger, maxTransactions int, - opts ...ModelOps, -) error { - queryParams := &datastore.QueryParams{Page: 1, PageSize: maxTransactions} - - // Get x records: - records, err := getIncomingTransactionsToProcess( - ctx, queryParams, opts..., - ) - if err != nil { - return err - } else if len(records) == 0 { - return nil - } - - if logClient != nil { - logClient.Info().Msgf("found %d incoming transactions to process", len(records)) - } - - // Process the incoming transaction - for index := range records { - if err = processIncomingTransaction( - ctx, logClient, records[index], - ); err != nil { - return err - } - } - - return nil -} - -// processIncomingTransaction will process the incoming transaction record into a transaction, or save the failure -func processIncomingTransaction(ctx context.Context, logClient *zerolog.Logger, - incomingTx *IncomingTransaction, -) error { - if logClient == nil { - logClient = 
incomingTx.client.Logger() - } - - logClient.Info().Str("txID", incomingTx.GetID()).Msgf("processIncomingTransaction(): transaction: %v", incomingTx) - - // Successfully capture any panics, convert to readable string and log the error - defer recoverAndLog(incomingTx.client.Logger()) - - // Create the lock and set the release for after the function completes - unlock, err := newWriteLock( - ctx, fmt.Sprintf(lockKeyProcessIncomingTx, incomingTx.GetID()), incomingTx.Client().Cachestore(), - ) - defer unlock() - if err != nil { - return err - } - - // Find in mempool or on-chain - var txInfo *chainstate.TransactionInfo - if txInfo, err = incomingTx.Client().Chainstate().QueryTransactionFastest( - ctx, incomingTx.ID, chainstate.RequiredInMempool, defaultQueryTxTimeout, - ); err != nil { - - logClient.Error(). - Str("txID", incomingTx.GetID()). - Msgf("error finding transaction %s on chain. Reason: %s", incomingTx.ID, err) - - // TX might not have been broadcast yet? (race condition, or it was never broadcast...) - if errors.Is(err, chainstate.ErrTransactionNotFound) { - var provider string - - // Broadcast and detect if there is a real error - if provider, err = incomingTx.Client().Chainstate().Broadcast( - ctx, incomingTx.ID, incomingTx.Hex, defaultQueryTxTimeout, - ); err != nil { - bailAndSaveIncomingTransaction(ctx, incomingTx, "tx was not found using all providers, attempted broadcast, "+err.Error()) - return err - } - - // Broadcast was successful, so the transaction was accepted by the network, continue processing like before - logClient.Info(). - Str("txID", incomingTx.GetID()). - Msgf("broadcast of transaction was successful using %s. 
Incoming tx will be processed again.", provider) - - // allow propagation - time.Sleep(3 * time.Second) - return nil // reprocess it when triggering the task again - } - - // Actual error occurred - bailAndSaveIncomingTransaction(ctx, incomingTx, err.Error()) - return err - } - - if !txInfo.Valid() { - logClient.Warn().Str("txID", incomingTx.ID).Msg("txInfo is invalid, will try again later") - - if incomingTx.client.IsDebug() { - txInfoJSON, _ := json.Marshal(txInfo) //nolint:nolintlint,nilerr,govet,errchkjson // error is not needed - logClient.Debug().Str("txID", incomingTx.ID).Msg(string(txInfoJSON)) - } - return nil - } - - logClient.Info().Str("txID", incomingTx.ID).Msgf("found incoming transaction in %s", txInfo.Provider) - - // Check if we have transaction in DB already - transaction, _ := getTransactionByID( - ctx, incomingTx.rawXpubKey, incomingTx.ID, incomingTx.client.DefaultModelOptions()..., - ) - - if transaction == nil { - // Create the new transaction model - if transaction, err = newTransactionFromIncomingTransaction(incomingTx); err != nil { - logClient.Error().Str("txID", incomingTx.ID).Msgf("creating a new tx failed. Reason: %s", err) - return err - } - - if err = transaction.processUtxos(ctx); err != nil { - logClient.Error(). - Str("txID", incomingTx.ID). - Msgf("processing utxos for tx failed. Reason: %s", err) - return err - } - } - - transaction.setChainInfo(txInfo) - - // Create status message - onChain := len(transaction.BlockHash) > 0 || transaction.BlockHeight > 0 - message := "transaction was found in mempool by " + txInfo.Provider - if onChain { - message = "transaction was found on-chain by " + txInfo.Provider - } - - // Save (add) the transaction (should NOT error) - if err = transaction.Save(ctx); err != nil { - bailAndSaveIncomingTransaction(ctx, incomingTx, err.Error()) - return err - } - - // Update (or delete?) 
the incoming transaction record - incomingTx.Status = statusComplete - incomingTx.StatusMessage = message - if err = incomingTx.Save(ctx); err != nil { - bailAndSaveIncomingTransaction(ctx, incomingTx, err.Error()) - return err - } - - // Done! - return nil -} - -// bailAndSaveIncomingTransaction try to save the error message -func bailAndSaveIncomingTransaction(ctx context.Context, incomingTx *IncomingTransaction, errorMessage string) { - incomingTx.Status = statusError - incomingTx.StatusMessage = errorMessage - _ = incomingTx.Save(ctx) -} From 9b37b1b02db8185b8e6c57fbc0b1e24a016cb47b Mon Sep 17 00:00:00 2001 From: wregulski Date: Tue, 30 Jan 2024 10:05:05 +0100 Subject: [PATCH 05/13] feat: update broadcast client dep --- go.mod | 2 +- go.sum | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/go.mod b/go.mod index 1a91ebd1..a3be03a9 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21.5 require ( github.com/DATA-DOG/go-sqlmock v1.5.2 - github.com/bitcoin-sv/go-broadcast-client v0.16.0 + github.com/bitcoin-sv/go-broadcast-client v0.16.1 github.com/bitcoin-sv/go-paymail v0.12.1 github.com/bitcoinschema/go-bitcoin/v2 v2.0.5 github.com/bitcoinschema/go-map v0.1.0 diff --git a/go.sum b/go.sum index 184266a8..af50269c 100644 --- a/go.sum +++ b/go.sum @@ -20,6 +20,8 @@ github.com/aws/aws-sdk-go v1.43.45 h1:2708Bj4uV+ym62MOtBnErm/CDX61C4mFe9V2gXy1ca github.com/aws/aws-sdk-go v1.43.45/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo= github.com/bitcoin-sv/go-broadcast-client v0.16.0 h1:KadOLv+i9Y6xAOkHsSl2PIECQ59SpUyYurY6Ysvpz5A= github.com/bitcoin-sv/go-broadcast-client v0.16.0/go.mod h1:GRAliwumNBjEbLRIEkXqIKJpsgmMfjvlIDqgyw/NoJE= +github.com/bitcoin-sv/go-broadcast-client v0.16.1 h1:VG4QZwJEVQY/QQupTDeLMw+PEeqh9mn4id+XzYpAmHs= +github.com/bitcoin-sv/go-broadcast-client v0.16.1/go.mod h1:GRAliwumNBjEbLRIEkXqIKJpsgmMfjvlIDqgyw/NoJE= github.com/bitcoin-sv/go-paymail v0.12.1 h1:MDdMFFOZalymT5O5WDUN0EVVWdn3ygo6EhKsWimkM/E= 
github.com/bitcoin-sv/go-paymail v0.12.1/go.mod h1:/BGu//F4Ji7jIzvkcHxlwBB9vU90yVRx/tovX91Tbw0= github.com/bitcoinschema/go-bitcoin/v2 v2.0.5 h1:Sgh5Eb746Zck/46rFDrZZEXZWyO53fMuWYhNoZa1tck= From ba6bd6ac02173e604b78411fb9b716690e91db52 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 10:08:46 +0100 Subject: [PATCH 06/13] feat: add update transaction business logic --- action_transaction.go | 41 +++++++++++++++++++++++++++++++ chainstate/broadcast_providers.go | 9 +++++-- chainstate/client.go | 10 ++++++++ chainstate/client_options.go | 8 ++++++ client_options.go | 7 ++++++ cron_job_declarations.go | 2 +- interface.go | 2 ++ utils/utils.go | 19 ++++++++++++++ 8 files changed, 95 insertions(+), 3 deletions(-) diff --git a/action_transaction.go b/action_transaction.go index f6123d7e..3d6af23d 100644 --- a/action_transaction.go +++ b/action_transaction.go @@ -9,6 +9,8 @@ import ( "github.com/BuxOrg/bux/chainstate" "github.com/BuxOrg/bux/utils" + "github.com/bitcoin-sv/go-broadcast-client/broadcast" + "github.com/libsv/go-bc" "github.com/libsv/go-bt" "github.com/mrz1836/go-datastore" ) @@ -381,6 +383,45 @@ func (c *Client) RevertTransaction(ctx context.Context, id string) error { return err } +// UpdateTransaction will update the broadcast callback transaction info, like: block height, block hash, status, bump. 
+func (c *Client) UpdateTransaction(ctx context.Context, callbackResp *broadcast.SubmittedTx) error { + bump, err := bc.NewBUMPFromStr(callbackResp.MerklePath) + if err != nil { + msg := fmt.Sprintf("failed to parse merkle path from broadcast callback - tx: %v", callbackResp) + c.options.logger.Err(err).Msg(msg) + return err + } + + txInfo := &chainstate.TransactionInfo{ + BlockHash: callbackResp.BlockHash, + BlockHeight: callbackResp.BlockHeight, + ID: callbackResp.TxID, + TxStatus: callbackResp.TxStatus, + BUMP: bump, + // it's not possible to get confirmations from broadcast client; zero would be treated as "not confirmed" that's why -1 + Confirmations: -1, + } + + ids := []string{txInfo.ID} + // we use GetTransactionsByIDs to reuse existing function as the main one requires xpubID + txs, err := c.GetTransactionsByIDs(ctx, ids) + if err != nil || len(txs) != 1 { + msg := fmt.Sprintf("failed to get transaction by id while processing callback: %v", txInfo.ID) + c.options.logger.Err(err).Msg(msg) + return err + } + + tx := txs[0] + tx.setChainInfo(txInfo) + if err = tx.Save(ctx); err != nil { + msg := fmt.Sprintf("failed to save transaction while processing callback: %v", txInfo.ID) + c.options.logger.Err(err).Msg(msg) + return err + } + + return nil +} + func generateTxIDFilterConditions(txIDs []string) *map[string]interface{} { orConditions := make([]map[string]interface{}, len(txIDs)) diff --git a/chainstate/broadcast_providers.go b/chainstate/broadcast_providers.go index 6b3c064a..e66bc419 100644 --- a/chainstate/broadcast_providers.go +++ b/chainstate/broadcast_providers.go @@ -94,14 +94,19 @@ func (provider broadcastClientProvider) broadcast(ctx context.Context, c *Client return broadcastWithBroadcastClient(ctx, c, provider.txID, provider.txHex) } -func broadcastWithBroadcastClient(ctx context.Context, client ClientInterface, txID, hex string) error { +func broadcastWithBroadcastClient(ctx context.Context, client *Client, txID, hex string) error { 
debugLog(client, txID, "executing broadcast request for "+ProviderBroadcastClient) tx := broadcast.Transaction{ Hex: hex, } - result, err := client.BroadcastClient().SubmitTransaction(ctx, &tx, broadcast.WithRawFormat()) + result, err := client.BroadcastClient().SubmitTransaction( + ctx, + &tx, + broadcast.WithRawFormat(), + broadcast.WithCallback(client.options.config.callbackURL, client.options.config.callbackToken), + ) if err != nil { debugLog(client, txID, "error broadcast request for "+ProviderBroadcastClient+" failed: "+err.Error()) return err diff --git a/chainstate/client.go b/chainstate/client.go index 16a88f34..01c3451f 100644 --- a/chainstate/client.go +++ b/chainstate/client.go @@ -32,6 +32,8 @@ type ( // syncConfig holds all the configuration about the different sync processes syncConfig struct { + callbackURL string // Broadcast callback URL + callbackToken string // Broadcast callback access token excludedProviders []string // List of provider names httpClient HTTPInterface // Custom HTTP client (Minercraft, WOC) minercraftConfig *minercraftConfig // minercraftConfig configuration @@ -199,3 +201,11 @@ func (c *Client) checkFeeUnit() error { } return nil } + +// func (c *Client) getCallbackRoute() error { +// // return c.options.config. 
+// } + +func (c *Client) getCallbackToken() string { + return c.options.config.callbackToken +} diff --git a/chainstate/client_options.go b/chainstate/client_options.go index 262d8122..fd4ecfb0 100644 --- a/chainstate/client_options.go +++ b/chainstate/client_options.go @@ -165,3 +165,11 @@ func WithConnectionToPulse(url, authToken string) ClientOps { c.config.pulseClient = newPulseClientProvider(url, authToken) } } + +// WithCallback will set broadcast callback settings +func WithCallback(callbackURL, callbackAuthToken string) ClientOps { + return func(c *clientOptions) { + c.config.callbackURL = callbackURL + c.config.callbackToken = callbackAuthToken + } +} diff --git a/client_options.go b/client_options.go index 67562eaf..776a5bfa 100644 --- a/client_options.go +++ b/client_options.go @@ -662,3 +662,10 @@ func WithBroadcastClient(broadcastClient broadcast.Client) ClientOps { c.chainstate.options = append(c.chainstate.options, chainstate.WithBroadcastClient(broadcastClient)) } } + +// WithCallback set callback settings +func WithCallback(callbackURL string, callbackToken string) ClientOps { + return func(c *clientOptions) { + c.chainstate.options = append(c.chainstate.options, chainstate.WithCallback(callbackURL, callbackToken)) + } +} diff --git a/cron_job_declarations.go b/cron_job_declarations.go index 68a05957..f1d6c594 100644 --- a/cron_job_declarations.go +++ b/cron_job_declarations.go @@ -35,7 +35,7 @@ func (c *Client) cronJobs() taskmanager.CronJobs { Handler: handler(taskBroadcastTransactions), }, CronJobNameSyncTransactionSync: { - Period: 120 * time.Second, + Period: 600 * time.Second, Handler: handler(taskSyncTransactions), }, } diff --git a/interface.go b/interface.go index cfd8905b..51838b6e 100644 --- a/interface.go +++ b/interface.go @@ -8,6 +8,7 @@ import ( "github.com/BuxOrg/bux/cluster" "github.com/BuxOrg/bux/notifications" "github.com/BuxOrg/bux/taskmanager" + "github.com/bitcoin-sv/go-broadcast-client/broadcast" 
"github.com/bitcoin-sv/go-paymail" "github.com/mrz1836/go-cachestore" "github.com/mrz1836/go-datastore" @@ -132,6 +133,7 @@ type TransactionService interface { RecordTransaction(ctx context.Context, xPubKey, txHex, draftID string, opts ...ModelOps) (*Transaction, error) RecordRawTransaction(ctx context.Context, txHex string, opts ...ModelOps) (*Transaction, error) + UpdateTransaction(ctx context.Context, txInfo *broadcast.SubmittedTx) error UpdateTransactionMetadata(ctx context.Context, xPubID, id string, metadata Metadata) (*Transaction, error) RevertTransaction(ctx context.Context, id string) error } diff --git a/utils/utils.go b/utils/utils.go index fef71ea3..e23c2d8f 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -8,6 +8,8 @@ import ( "crypto/sha256" "encoding/binary" "encoding/hex" + "fmt" + "hash/adler32" "math" "strconv" @@ -95,6 +97,23 @@ func LittleEndianBytes64(value uint64, resultLength uint32) []byte { return buf } +// HashAdler32 returns computed string calculated with Adler32 function. 
+func HashAdler32(input string) (string, error) { + if input == "" { + return "", fmt.Errorf("input string is empty - cannot apply adler32 hash function") + } + data := []byte(input) + hasher := adler32.New() + _, err := hasher.Write(data) + if err != nil { + return "", err + } + + sum := hasher.Sum32() + + return fmt.Sprintf("%08x", sum), nil +} + // SafeAssign - Assigns value (not pointer) the src to dest if src is not nil func SafeAssign[T any](dest *T, src *T) { if src != nil { From 54702ea479fd6ae4d7c11af220e7c0c2219b3e89 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 10:14:10 +0100 Subject: [PATCH 07/13] feat: simplifies UpdateTransaction bl --- action_transaction.go | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/action_transaction.go b/action_transaction.go index 3d6af23d..54eac381 100644 --- a/action_transaction.go +++ b/action_transaction.go @@ -402,16 +402,13 @@ func (c *Client) UpdateTransaction(ctx context.Context, callbackResp *broadcast. 
Confirmations: -1, } - ids := []string{txInfo.ID} - // we use GetTransactionsByIDs to reuse existing function as the main one requires xpubID - txs, err := c.GetTransactionsByIDs(ctx, ids) - if err != nil || len(txs) != 1 { + tx, err := c.GetTransaction(ctx, "", txInfo.ID) + if err != nil { msg := fmt.Sprintf("failed to get transaction by id while processing callback: %v", txInfo.ID) c.options.logger.Err(err).Msg(msg) return err } - tx := txs[0] tx.setChainInfo(txInfo) if err = tx.Save(ctx); err != nil { msg := fmt.Sprintf("failed to save transaction while processing callback: %v", txInfo.ID) From 47077a192da5b6d8c9877ba849539be031d28d96 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 10:44:41 +0100 Subject: [PATCH 08/13] feat: remove unused methods --- chainstate/client.go | 8 -------- 1 file changed, 8 deletions(-) diff --git a/chainstate/client.go b/chainstate/client.go index 01c3451f..9cfe320a 100644 --- a/chainstate/client.go +++ b/chainstate/client.go @@ -201,11 +201,3 @@ func (c *Client) checkFeeUnit() error { } return nil } - -// func (c *Client) getCallbackRoute() error { -// // return c.options.config. 
-// } - -func (c *Client) getCallbackToken() string { - return c.options.config.callbackToken -} From bc356dc60b2beddffab6c69343f0666d319b72a3 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 10:45:57 +0100 Subject: [PATCH 09/13] chore: add vscode folder to .gitignore --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 4165044d..8c56f94c 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,9 @@ go.list # Jetbrains .idea/ +#VSCode +.vscode/ + # Eclipse .project From dbff3db32b4f30fcfbfe7ef13fbb6a1a5211a406 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 12:27:00 +0100 Subject: [PATCH 10/13] fix: use Msgf instead of Msg --- action_transaction.go | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/action_transaction.go b/action_transaction.go index 54eac381..01d0f1f5 100644 --- a/action_transaction.go +++ b/action_transaction.go @@ -387,8 +387,7 @@ func (c *Client) RevertTransaction(ctx context.Context, id string) error { func (c *Client) UpdateTransaction(ctx context.Context, callbackResp *broadcast.SubmittedTx) error { bump, err := bc.NewBUMPFromStr(callbackResp.MerklePath) if err != nil { - msg := fmt.Sprintf("failed to parse merkle path from broadcast callback - tx: %v", callbackResp) - c.options.logger.Err(err).Msg(msg) + c.options.logger.Err(err).Msgf("failed to parse merkle path from broadcast callback - tx: %v", callbackResp) return err } @@ -404,15 +403,13 @@ func (c *Client) UpdateTransaction(ctx context.Context, callbackResp *broadcast. 
tx, err := c.GetTransaction(ctx, "", txInfo.ID) if err != nil { - msg := fmt.Sprintf("failed to get transaction by id while processing callback: %v", txInfo.ID) - c.options.logger.Err(err).Msg(msg) + c.options.logger.Err(err).Msgf("failed to get transaction by id while processing callback: %v", txInfo.ID) return err } tx.setChainInfo(txInfo) if err = tx.Save(ctx); err != nil { - msg := fmt.Sprintf("failed to save transaction while processing callback: %v", txInfo.ID) - c.options.logger.Err(err).Msg(msg) + c.options.logger.Err(err).Msgf("failed to save transaction while processing callback: %v", txInfo.ID) return err } From 1128bff60dfff5542d3a437c79a225cd0fcc7302 Mon Sep 17 00:00:00 2001 From: wregulski Date: Wed, 31 Jan 2024 14:41:07 +0100 Subject: [PATCH 11/13] fix: make code reusable and update sync transaction --- action_transaction.go | 10 +++++----- sync_tx_repository.go | 19 +++++++++++++++++++ sync_tx_service.go | 14 ++++++-------- 3 files changed, 30 insertions(+), 13 deletions(-) diff --git a/action_transaction.go b/action_transaction.go index 01d0f1f5..74aaa3cd 100644 --- a/action_transaction.go +++ b/action_transaction.go @@ -403,17 +403,17 @@ func (c *Client) UpdateTransaction(ctx context.Context, callbackResp *broadcast. tx, err := c.GetTransaction(ctx, "", txInfo.ID) if err != nil { - c.options.logger.Err(err).Msgf("failed to get transaction by id while processing callback: %v", txInfo.ID) + c.options.logger.Err(err).Msgf("failed to get transaction by id: %v", txInfo.ID) return err } - tx.setChainInfo(txInfo) - if err = tx.Save(ctx); err != nil { - c.options.logger.Err(err).Msgf("failed to save transaction while processing callback: %v", txInfo.ID) + syncTx, err := GetSyncTransactionByTxID(ctx, txInfo.ID, c.DefaultModelOptions()...) 
+ if err != nil { + c.options.logger.Err(err).Msgf("failed to get sync transaction by tx id: %v", txInfo.ID) return err } - return nil + return processSyncTxSave(ctx, txInfo, syncTx, tx) } func generateTxIDFilterConditions(txIDs []string) *map[string]interface{} { diff --git a/sync_tx_repository.go b/sync_tx_repository.go index 8532521e..ecb584e3 100644 --- a/sync_tx_repository.go +++ b/sync_tx_repository.go @@ -30,6 +30,25 @@ func GetSyncTransactionByID(ctx context.Context, id string, opts ...ModelOps) (* return txs[0], nil } +// GetSyncTransactionByTxID will get a sync transaction by it's transaction id. +func GetSyncTransactionByTxID(ctx context.Context, txID string, opts ...ModelOps) (*SyncTransaction, error) { + // Get the records by status + txs, err := _getSyncTransactionsByConditions(ctx, + map[string]interface{}{ + txID: txID, + }, + nil, opts..., + ) + if err != nil { + return nil, err + } + if len(txs) != 1 { + return nil, nil + } + + return txs[0], nil +} + /*** /exported funcs ***/ /*** public unexported funcs ***/ diff --git a/sync_tx_service.go b/sync_tx_service.go index 9f707d65..d5acc738 100644 --- a/sync_tx_service.go +++ b/sync_tx_service.go @@ -221,14 +221,17 @@ func _syncTxDataFromChain(ctx context.Context, syncTx *SyncTransaction, transact } return err } + return processSyncTxSave(ctx, txInfo, syncTx, transaction) +} +func processSyncTxSave(ctx context.Context, txInfo *chainstate.TransactionInfo, syncTx *SyncTransaction, transaction *Transaction) error { if !txInfo.Valid() { syncTx.Client().Logger().Warn(). Str("txID", syncTx.ID). Msgf("txInfo is invalid, will try again later") if syncTx.Client().IsDebug() { - txInfoJSON, _ := json.Marshal(txInfo) //nolint:errchkjson // error is not needed + txInfoJSON, _ := json.Marshal(txInfo) syncTx.Client().Logger().Debug(). Str("txID", syncTx.ID). 
Msgf("txInfo: %s", string(txInfoJSON)) @@ -238,18 +241,15 @@ func _syncTxDataFromChain(ctx context.Context, syncTx *SyncTransaction, transact transaction.setChainInfo(txInfo) - // Create status message message := "transaction was found on-chain by " + chainstate.ProviderBroadcastClient - // Save the transaction (should NOT error) - if err = transaction.Save(ctx); err != nil { + if err := transaction.Save(ctx); err != nil { _bailAndSaveSyncTransaction( ctx, syncTx, SyncStatusError, syncActionSync, "internal", err.Error(), ) return err } - // Update the sync status syncTx.SyncStatus = SyncStatusComplete syncTx.Results.LastMessage = message syncTx.Results.Results = append(syncTx.Results.Results, &SyncResult{ @@ -259,8 +259,7 @@ func _syncTxDataFromChain(ctx context.Context, syncTx *SyncTransaction, transact StatusMessage: message, }) - // Update the sync transaction record - if err = syncTx.Save(ctx); err != nil { + if err := syncTx.Save(ctx); err != nil { _bailAndSaveSyncTransaction(ctx, syncTx, SyncStatusError, syncActionSync, "internal", err.Error()) return err } @@ -268,7 +267,6 @@ func _syncTxDataFromChain(ctx context.Context, syncTx *SyncTransaction, transact syncTx.Client().Logger().Info(). Str("txID", syncTx.ID). Msgf("Transaction processed successfully") - // Done! 
return nil } From 520ee23d5d874bbe4eeb752341c226cc6c5facdd Mon Sep 17 00:00:00 2001 From: wregulski Date: Thu, 1 Feb 2024 12:02:01 +0100 Subject: [PATCH 12/13] fix: update field name in sync_tx_repo --- sync_tx_repository.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sync_tx_repository.go b/sync_tx_repository.go index ecb584e3..42127bc1 100644 --- a/sync_tx_repository.go +++ b/sync_tx_repository.go @@ -35,7 +35,7 @@ func GetSyncTransactionByTxID(ctx context.Context, txID string, opts ...ModelOps // Get the records by status txs, err := _getSyncTransactionsByConditions(ctx, map[string]interface{}{ - txID: txID, + idField: txID, }, nil, opts..., ) From 9d1046d43aa77cc5780af0fe9e0f3431b1650bb7 Mon Sep 17 00:00:00 2001 From: wregulski Date: Thu, 1 Feb 2024 13:10:52 +0100 Subject: [PATCH 13/13] feat: change number of bux version --- definitions.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/definitions.go b/definitions.go index 2972f487..d234f473 100644 --- a/definitions.go +++ b/definitions.go @@ -20,7 +20,7 @@ const ( dustLimit = uint64(1) // Dust limit mongoTestVersion = "6.0.4" // Mongo Testing Version sqliteTestVersion = "3.37.0" // SQLite Testing Version (dummy version for now) - version = "v0.13.0" // bux version + version = "v0.14.2" // bux version ) // All the base models