From d09b7f4d0fb6d2e04349b65fc7359585ea75fc37 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Diego=20Fern=C3=A1ndez=20Barrera?=
Date: Sat, 14 Mar 2020 17:06:21 +0100
Subject: [PATCH 1/4] feat: Allow to disable some crawlers

If no configuration object is provided, crawlers won't start.
---
 backend.config.js              |  36 ++++++--
 index.js                       |   2 +-
 lib/BackendV3.js               |  66 ++++++-------
 lib/crawlers/activeAccounts.js |   3 +-
 lib/crawlers/blockHarvester.js |   3 +-
 lib/crawlers/blockListener.js  |   4 +-
 lib/crawlers/phragmen.js       | 147 +++++++++++++++++++--------------
 lib/crawlers/rewards.js        |   4 +-
 lib/crawlers/staking.js        |   3 +-
 9 files changed, 159 insertions(+), 109 deletions(-)

diff --git a/backend.config.js b/backend.config.js
index 9576495a..5518618f 100644
--- a/backend.config.js
+++ b/backend.config.js
@@ -1,7 +1,9 @@
+// Also wss://kusama-rpc.polkadot.io
+const DEFAULT_WS_PROVIDER_URL = 'ws://substrate-node:9944';
+
 module.exports = {
-  // Local Polkadot Kusama node
-  wsProviderUrl: 'ws://substrate-node:9944',
-  // Postgres database connection params
+  wsProviderUrl: process.env.WS_PROVIDER_URL || DEFAULT_WS_PROVIDER_URL,
+
   postgresConnParams: {
     user: 'polkastats',
     host: 'postgres',
@@ -9,12 +11,32 @@ module.exports = {
     password: 'polkastats',
     port: 5432,
   },
-  BLOCK_HARVESTER_POLLING_TIME: 1 * 60 * 1000, // Run every 10 min
-  ACTIVE_ACCOUNTS_POLLING_TIME: 1 * 60 * 1000, // Run every 60 min
-  PHRAGMEN_POLLING_TIME: 5 * 60 * 1000, // Run every 5 min
+
+  staking: {
+    enabled: true,
+  },
+
+  rewards: {
+    enabled: true,
+  },
+
+  blockListener: {
+    enabled: true,
+  },
+
+  blockHarvester: {
+    enabled: true,
+    pollingTime: 1 * 60 * 1000,
+  },
+
+  accounts: {
+    enabled: true,
+    pollingTime: 1 * 60 * 1000,
+  },
 
   phragmen: {
-    wsProviderUrl: 'ws://substrate-node:9944',
+    enabled: true,
+    pollingTime: 5 * 60 * 1000,
     phragmenOutputDir: '/tmp/phragmen',
     offlinePhragmenPath: '/usr/app/polkastats-backend-v3/offline-phragmen',
   },
diff --git a/index.js b/index.js
index 393f6ed3..3d74b020 100644
--- a/index.js
+++ b/index.js
@@ -1,9 +1,9 @@
-
 const config = require('./backend.config.js');
 const BackendV3 = require('./lib/BackendV3.js');
 
 async function main () {
   const backendV3 = new BackendV3(config);
+  backendV3.runCrawlers();
 }
 
 main().catch((error) => {
diff --git a/lib/BackendV3.js b/lib/BackendV3.js
index 9ace2d72..3cf0b506 100644
--- a/lib/BackendV3.js
+++ b/lib/BackendV3.js
@@ -14,49 +14,50 @@ class BackendV3 {
   constructor(config) {
     this.config = config;
     this.nodeisSyncing = true;
-    this.runCrawlers();
   }
+
   async runCrawlers() {
-    let api;
-    api = await this.getPolkadotAPI();
+    const pool = await this.getPool();
+
+    let api = await this.getPolkadotAPI();
     while (!api) {
       await wait(10000);
       api = await this.getPolkadotAPI();
     }
-    const pool = await this.getPool();
-
-    console.log(`[PolkaStats backend v3] - \x1b[32m${JSON.stringify(this.config, null, 2)}\x1b[0m`);
     console.log(`[PolkaStats backend v3] - \x1b[32mRunning crawlers\x1b[0m`);
-    // Listen to new blocks and system events and add them to database
-    console.log(`[PolkaStats backend v3] - \x1b[32mStarting block listener...\x1b[0m`);
-    blockListener(api, pool);
-    // Get rewards
-    console.log(`[PolkaStats backend v3] - \x1b[32mStarting rewards crawler...\x1b[0m`);
-    rewards(api, pool);
-    // Fill the gaps in block and event tables
-    setTimeout(() => {
-      console.log(`[PolkaStats backend v3] - \x1b[32mStarting block harvester...\x1b[0m`);
-      blockHarvester(api, pool, this.config);
-    }, this.config.BLOCK_HARVESTER_POLLING_TIME);
-    // Get information about active accounts
-    
setTimeout(() => { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`); - activeAccounts(api, pool, this.config); - }, this.config.ACTIVE_ACCOUNTS_POLLING_TIME); - // Get offline phragmen output - setInterval( - () => phragmen(api, pool, this.config.phragmen), - this.config.PHRAGMEN_POLLING_TIME, - ); - // Get staking info - setTimeout(() => { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting staking crawler...\x1b[0m`); - staking(api, pool); - }, this.config.STAKING_POLLING_TIME); + + if (this.config.blockListener && this.config.blockListener.enabled) { + blockListener(api, pool, this.config.blockListener); + } + + if (this.config.rewards && this.config.rewards.enabled) { + rewards(api, pool, this.config.rewards); + } + + if (this.config.blockHarvester && this.config.blockHarvester.enabled) { + blockHarvester(api, pool, this.config.blockHarvester); + } + + if (this.config.accounts && this.config.accounts.enabled) { + activeAccounts(api, pool, this.config.accounts); + } + + if (this.config.staking && this.config.staking.enabled) { + staking(api, pool, this.config.staking); + } + + if (this.config.phragmen && this.config.phragmen.enabled) { + phragmen(api, pool, { + ...this.config.phragmen, + wsProviderUrl: this.config.wsProviderUrl, + }); + } } + async getPolkadotAPI() { console.log(`[PolkaStats backend v3] - \x1b[32mConnecting to ${this.config.wsProviderUrl}\x1b[0m`); + const provider = new WsProvider(this.config.wsProviderUrl); const api = await ApiPromise.create({ provider }); await api.isReady; @@ -87,6 +88,7 @@ class BackendV3 { } return false; } + async getPool() { const pool = new Pool(this.config.postgresConnParams); await pool.connect(); diff --git a/lib/crawlers/activeAccounts.js b/lib/crawlers/activeAccounts.js index 048f74af..001ec7a0 100644 --- a/lib/crawlers/activeAccounts.js +++ b/lib/crawlers/activeAccounts.js @@ -1,8 +1,7 @@ // @ts-check module.exports = { activeAccounts: async function (api, pool, config) { - - const POLLING_TIME = config.ACTIVE_ACCOUNTS_POLLING_TIME; + console.log(`[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`); // Fetch active accounts const accounts = await api.derive.accounts.indexes(); diff --git a/lib/crawlers/blockHarvester.js b/lib/crawlers/blockHarvester.js index d1a7cbe1..03e06f08 100644 --- a/lib/crawlers/blockHarvester.js +++ b/lib/crawlers/blockHarvester.js @@ -3,12 +3,11 @@ const { shortHash } = require('../utils.js'); module.exports = { blockHarvester: async function (api, pool, config) { + console.log(`[PolkaStats backend v3] - \x1b[32mStarting block harvester...\x1b[0m`); // Start execution const startTime = new Date().getTime(); - const POLLING_TIME = config.BLOCK_HARVESTER_POLLING_TIME; - let addedBlocks = 0; // Get gaps from block table diff --git a/lib/crawlers/blockListener.js b/lib/crawlers/blockListener.js index f158a0d0..34a9596d 100644 --- a/lib/crawlers/blockListener.js +++ b/lib/crawlers/blockListener.js @@ -2,7 +2,9 @@ const { shortHash } = require('../utils.js'); module.exports = { - blockListener: async function (api, pool) { + blockListener: async function (api, pool, _config) { + console.log(`[PolkaStats backend v3] - \x1b[32mStarting block listener...\x1b[0m`); + // Subscribe to new blocks await api.rpc.chain.subscribeNewHeads(async (header) => { diff --git a/lib/crawlers/phragmen.js b/lib/crawlers/phragmen.js index 3035d021..a491f088 100644 --- a/lib/crawlers/phragmen.js +++ b/lib/crawlers/phragmen.js @@ -4,69 +4,90 @@ const fs = 
require('fs');
 
 const DEFAULT_PHRAGMEN_OUTPUT_DIR = '/tmp/phragmen';
 const DEFAULT_OFFLINE_PHRAGMEN_PATH = 'offline-phragmen';
+const DEFAULT_POLLING_TIME = 5 * 60 * 1000;
 
-module.exports = {
-  async phragmen(api, pool, config) {
-    if (!config) {
-      return;
-    }
-
-    log('\x1b[32mStarting phragmen crawler...\x1b[0m');
-
-    const phragmenOutputDir =
-      config.phragmenOutputDir || DEFAULT_PHRAGMEN_OUTPUT_DIR;
-    const offlinePhragmenPath =
-      config.offlinePhragmenPath || DEFAULT_OFFLINE_PHRAGMEN_PATH;
-
-    if (!fs.existsSync(phragmenOutputDir)) {
-      fs.mkdirSync(phragmenOutputDir);
-    }
-
-    const startTime = new Date().getTime();
-
-    const promises = Promise.all([
-      api.derive.chain.bestNumber(),
-      api.query.staking.validatorCount(),
-      api.query.staking.minimumValidatorCount(),
-    ]);
-    const [blockHeight, validatorCount, minimumValidatorCount] = await promises;
-
-    const phragmenArgs = buildPhragmenArgs(
-      config.wsProviderUrl,
-      phragmenOutputDir,
-      validatorCount,
-      minimumValidatorCount,
-    );
-    const result = spawnSync(offlinePhragmenPath, phragmenArgs);
-    if (result.error) {
-      throw result.error;
-    }
-
-    const phragmenOutput = fs.readFileSync(`${phragmenOutputDir}/output.json`);
-    if (phragmenOutput) {
-      const timestamp = new Date().getTime();
-      const sqlInsert = buildInsertQuery(
-        blockHeight,
-        phragmenOutput,
-        timestamp,
-      );
-
-      await pool.query(sqlInsert);
-    } else {
-      log('\x1b[31mError!\x1b[0m');
-    }
-
-    // Execution end time
-    const endTime = new Date().getTime();
-
-    //
-    // Log execution time
-    //
-    const executionTime = ((endTime - startTime) / 1000).toFixed(0);
-    log(`\x1b[32mExecution time: ${executionTime}s\x1b[0m`);
-    log(`\x1b[32mNext execution in 5m...\x1b[0m`);
-  },
-};
+/**
+ * phragmen fetches some information from the blockchain using the Polkadot
+ * API, uses the collected data to run the "offline-phragmen" binary and
+ * stores the output in the database.
+ * + * @param {object} api Polkadot API object + * @param {object} pool Postgres pool object + * @param {object} configTemplate Configuration object + */ +async function phragmen(api, pool, configTemplate) { + const config = buildConfig(configTemplate); + + const startTime = new Date().getTime(); + log('\x1b[32mRunning phragmen crawler...\x1b[0m'); + + if (!fs.existsSync(config.phragmenOutputDir)) { + fs.mkdirSync(config.phragmenOutputDir); + } + + const promises = Promise.all([ + api.derive.chain.bestNumber(), + api.query.staking.validatorCount(), + api.query.staking.minimumValidatorCount(), + ]); + const [blockHeight, validatorCount, minimumValidatorCount] = await promises; + + const phragmenArgs = buildPhragmenArgs( + config.wsProviderUrl, + config.phragmenOutputDir, + validatorCount, + minimumValidatorCount, + ); + const result = spawnSync(config.offlinePhragmenPath, phragmenArgs); + if (result.error) { + throw result.error; + } + + const phragmenOutput = fs.readFileSync( + `${config.phragmenOutputDir}/output.json`, + ); + if (phragmenOutput) { + const timestamp = new Date().getTime(); + const sqlInsert = buildInsertQuery(blockHeight, phragmenOutput, timestamp); + + await pool.query(sqlInsert); + } else { + log('\x1b[31mError!\x1b[0m'); + } + + logExecutionTime(startTime); + + setTimeout(() => phragmen(api, pool, config), config.pollingTime); +} + +//////////////////////////////////////////////////////////////////////////////// +// Private functions +//////////////////////////////////////////////////////////////////////////////// + +function logExecutionTime(startTime) { + const endTime = new Date().getTime(); + const executionTime = ((endTime - startTime) / 1000).toFixed(0); + log(`\x1b[32mExecution time: ${executionTime}s\x1b[0m`); + log(`\x1b[32mNext execution in 5m...\x1b[0m`); +} + +//////////////////////////////////////////////////////////////////////////////// +// Aux functions +//////////////////////////////////////////////////////////////////////////////// + +function buildConfig(config) { + if (!config.wsProviderUrl) { + throw Error('Error: "wsProviderUrl" is required'); + } + + return { + wsProviderUrl: config.wsProviderUrl, + pollingTime: config.pollingTime || DEFAULT_POLLING_TIME, + phragmenOutputDir: config.phragmenOutputDir || DEFAULT_PHRAGMEN_OUTPUT_DIR, + offlinePhragmenPath: + config.offlinePhragmenPath || DEFAULT_OFFLINE_PHRAGMEN_PATH, + }; +} function buildPhragmenArgs( wsProviderUrl, @@ -94,3 +115,5 @@ function buildInsertQuery(blockHeight, phragmenOutput, timestamp) { function log() { console.log('[PolkaStats backend v3] - Phragmen crawler - ', ...arguments); } + +module.exports = { phragmen }; diff --git a/lib/crawlers/rewards.js b/lib/crawlers/rewards.js index 9cda12f1..646a0d65 100644 --- a/lib/crawlers/rewards.js +++ b/lib/crawlers/rewards.js @@ -6,7 +6,9 @@ const {BigNumber} = require('bignumber.js'); let crawlerIsRunning = false; module.exports = { - rewards: async function (api, pool) { + rewards: async function (api, pool, _config) { + console.log(`[PolkaStats backend v3] - \x1b[32mStarting rewards crawler...\x1b[0m`); + // Subscribe to new blocks await api.rpc.chain.subscribeNewHeads( async (header) => { diff --git a/lib/crawlers/staking.js b/lib/crawlers/staking.js index cce8a5ac..fa40c2ef 100644 --- a/lib/crawlers/staking.js +++ b/lib/crawlers/staking.js @@ -4,7 +4,8 @@ const {BigNumber} = require('bignumber.js'); let crawlerIsRunning = false; module.exports = { - staking: async function (api, pool) { + staking: async function (api, pool, _config) { + 
console.log(`[PolkaStats backend v3] - \x1b[32mStarting staking crawler...\x1b[0m`); // Subscribe to new blocks await api.rpc.chain.subscribeNewHeads(async (header) => { From 85c895a4279e274385fcf2677a079ba10088e613 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Diego=20Fern=C3=A1ndez=20Barrera?= Date: Sat, 14 Mar 2020 17:13:37 +0100 Subject: [PATCH 2/4] fix: Fix setTimeouts --- lib/crawlers/activeAccounts.js | 8 ++++++-- lib/crawlers/blockHarvester.js | 9 ++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/lib/crawlers/activeAccounts.js b/lib/crawlers/activeAccounts.js index 001ec7a0..be91a61f 100644 --- a/lib/crawlers/activeAccounts.js +++ b/lib/crawlers/activeAccounts.js @@ -2,7 +2,7 @@ module.exports = { activeAccounts: async function (api, pool, config) { console.log(`[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`); - + // Fetch active accounts const accounts = await api.derive.accounts.indexes(); @@ -54,6 +54,10 @@ module.exports = { } } } - setTimeout(module.exports.activeAccounts(api, pool, config), POLLING_TIME); + + setTimeout( + () => module.exports.activeAccounts(api, pool, config), + config.pollingTime, + ); } } \ No newline at end of file diff --git a/lib/crawlers/blockHarvester.js b/lib/crawlers/blockHarvester.js index 03e06f08..d4c8aab6 100644 --- a/lib/crawlers/blockHarvester.js +++ b/lib/crawlers/blockHarvester.js @@ -4,7 +4,7 @@ const { shortHash } = require('../utils.js'); module.exports = { blockHarvester: async function (api, pool, config) { console.log(`[PolkaStats backend v3] - \x1b[32mStarting block harvester...\x1b[0m`); - + // Start execution const startTime = new Date().getTime(); @@ -53,8 +53,11 @@ module.exports = { // console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mAdded ${addedBlocks} blocks in ${((endTime - startTime) / 1000).toFixed(0)}s\x1b[0m`); console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mNext execution in 60m...\x1b[0m`); - setTimeout(module.exports.blockHarvester(api, pool, config), POLLING_TIME); - + + setTimeout( + () => module.exports.blockHarvester(api, pool, config), + config.pollingTime, + ); }, harvestBlocks: async function(api, pool, startBlock, endBlock) { let addedBlocks = 0; From 423d3308c4bb21f57c8a4e0916b9925cf79f7db7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Diego=20Fern=C3=A1ndez=20Barrera?= Date: Sat, 14 Mar 2020 20:06:17 +0100 Subject: [PATCH 3/4] chore: Integrate eslint and prettier --- .eslintrc.js | 32 +++++ .prettierrc | 4 + lib/BackendV3.js | 24 +++- lib/crawlers/activeAccounts.js | 65 ++++++--- lib/crawlers/blockHarvester.js | 101 +++++++++----- lib/crawlers/blockListener.js | 57 ++++---- lib/crawlers/phragmen.js | 3 +- lib/crawlers/rewards.js | 242 +++++++++++++++++++-------------- lib/crawlers/staking.js | 172 +++++++++++++++-------- lib/utils.js | 14 +- package.json | 8 +- 11 files changed, 469 insertions(+), 253 deletions(-) create mode 100644 .eslintrc.js create mode 100644 .prettierrc diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 00000000..e09c0285 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,32 @@ +module.exports = { + parserOptions: { + ecmaVersion: 2017, + }, + + env: { + es2017: true, + node: true, + }, + + extends: ['eslint:recommended', 'prettier'], + + rules: { + // ERROR + 'no-var': 'error', + 'lines-between-class-members': ['error', 'always'], + 'padding-line-between-statements': [ + 'error', + { blankLine: 'always', prev: 'block-like', next: '*' }, + { blankLine: 'always', prev: 'function', next: 'function' }, + 
], + 'newline-before-return': 'error', + 'object-shorthand': 'error', + 'space-before-blocks': 'error', + 'no-unused-vars': ['error', { argsIgnorePattern: '^_' }], + 'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off', + + // WARNING + 'guard-for-in': 'warn', + 'no-console': 'warn', + }, +}; diff --git a/.prettierrc b/.prettierrc new file mode 100644 index 00000000..4ff2b8b3 --- /dev/null +++ b/.prettierrc @@ -0,0 +1,4 @@ +{ + "singleQuote": true, + "trailingComma": "all" +} \ No newline at end of file diff --git a/lib/BackendV3.js b/lib/BackendV3.js index 3cf0b506..c2caf877 100644 --- a/lib/BackendV3.js +++ b/lib/BackendV3.js @@ -1,5 +1,3 @@ -// @ts-check - const { ApiPromise, WsProvider } = require('@polkadot/api'); const { Pool } = require('pg'); const { blockListener } = require('./crawlers/blockListener.js'); @@ -56,7 +54,9 @@ class BackendV3 { } async getPolkadotAPI() { - console.log(`[PolkaStats backend v3] - \x1b[32mConnecting to ${this.config.wsProviderUrl}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - \x1b[32mConnecting to ${this.config.crawlerRunner.wsProviderUrl}\x1b[0m`, + ); const provider = new WsProvider(this.config.wsProviderUrl); const api = await ApiPromise.create({ provider }); @@ -67,31 +67,41 @@ class BackendV3 { let node; try { node = await api.rpc.system.health(); - } catch { - console.log(`[PolkaStats backend v3] - \x1b[31mCan't connect to node! Waiting 10s...\x1b[0m`); + } catch (_err) { + console.log( + `[PolkaStats backend v3] - \x1b[31mCan't connect to node! Waiting 10s...\x1b[0m`, + ); api.disconnect(); await wait(10000); + return false; } - console.log(`[PolkaStats backend v3] - \x1b[32mNode: ${JSON.stringify(node)}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - \x1b[32mNode: ${JSON.stringify(node)}\x1b[0m`, + ); if (node && node.isSyncing.eq(false)) { // Node is synced! console.log(`[PolkaStats backend v3] - \x1b[32mNode is synced!\x1b[0m`); this.nodeisSyncing = false; + return api; } else { - console.log(`[PolkaStats backend v3] - \x1b[33mNode is not synced! Waiting 10s...\x1b[0m`); + console.log( + `[PolkaStats backend v3] - \x1b[33mNode is not synced! Waiting 10s...\x1b[0m`, + ); api.disconnect(); await wait(10000); } + return false; } async getPool() { const pool = new Pool(this.config.postgresConnParams); await pool.connect(); + return pool; } } diff --git a/lib/crawlers/activeAccounts.js b/lib/crawlers/activeAccounts.js index be91a61f..1291e6b9 100644 --- a/lib/crawlers/activeAccounts.js +++ b/lib/crawlers/activeAccounts.js @@ -1,18 +1,21 @@ -// @ts-check module.exports = { - activeAccounts: async function (api, pool, config) { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`); + async activeAccounts(api, pool, config) { + console.log( + `[PolkaStats backend v3] - \x1b[32mStarting active accounts crawler...\x1b[0m`, + ); // Fetch active accounts const accounts = await api.derive.accounts.indexes(); let accountsInfo = []; - for (var key in accounts ) { + for (var key in accounts) { let accountId = key; - let accountIndex = accounts[key] + let accountIndex = accounts[key]; let accountInfo = await api.derive.accounts.info(accountId); - let identity = accountInfo.identity.display ? JSON.stringify(accountInfo.identity) : ''; + let identity = accountInfo.identity.display + ? JSON.stringify(accountInfo.identity) + : ''; let nickname = accountInfo.nickname ? 
accountInfo.nickname : ''; let balances = await api.derive.balances.all(accountId); accountsInfo[accountId] = { @@ -20,13 +23,15 @@ module.exports = { accountIndex, identity, nickname, - balances - } - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing account ${accountId}\x1b[0m`); + balances, + }; + console.log( + `[PolkaStats backend v3] - Active Accounts - \x1b[32mProcessing account ${accountId}\x1b[0m`, + ); } // Main loop - for (var key in accountsInfo ) { + for (var key in accountsInfo) { if (accountsInfo.hasOwnProperty(key)) { // console.log(key + " -> " + accounts[key]); let sql = `SELECT account_id FROM account WHERE account_id = '${key}'`; @@ -35,23 +40,45 @@ module.exports = { const resBlockHeight = await pool.query(sqlBlockHeight); if (res.rows.length > 0) { const timestamp = new Date().getTime(); - sql = `UPDATE account SET account_index = '${accountsInfo[key].accountIndex}', nickname = '${accountsInfo[key].nickname}', identity = '${accountsInfo[key].identity}', balances = '${JSON.stringify(accountsInfo[key].balances)}', timestamp = '${timestamp}', block_height = '${resBlockHeight.rows[0].block_number}' WHERE account_id = '${key}'`; + sql = `UPDATE account SET account_index = '${ + accountsInfo[key].accountIndex + }', nickname = '${accountsInfo[key].nickname}', identity = '${ + accountsInfo[key].identity + }', balances = '${JSON.stringify( + accountsInfo[key].balances, + )}', timestamp = '${timestamp}', block_height = '${ + resBlockHeight.rows[0].block_number + }' WHERE account_id = '${key}'`; try { - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mUpdating account ${accountsInfo[key].accountIndex} [${key}]\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Active Accounts - \x1b[32mUpdating account ${accountsInfo[key].accountIndex} [${key}]\x1b[0m`, + ); await pool.query(sql); } catch (error) { - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError updating account ${key}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Active Accounts - \x1b[31mError updating account ${key}\x1b[0m`, + ); } } else { const timestamp = new Date().getTime(); - sql = `INSERT INTO account (account_id, account_index, nickname, identity, balances, timestamp, block_height) VALUES ('${key}', '${accountsInfo[key].accountIndex}', '${accountsInfo[key].nickname}', '${accountsInfo[key].idenity}', '${JSON.stringify(accountsInfo[key].balances)}', '${timestamp}', '${resBlockHeight.rows[0].block_number}');`; + sql = `INSERT INTO account (account_id, account_index, nickname, identity, balances, timestamp, block_height) VALUES ('${key}', '${ + accountsInfo[key].accountIndex + }', '${accountsInfo[key].nickname}', '${ + accountsInfo[key].idenity + }', '${JSON.stringify( + accountsInfo[key].balances, + )}', '${timestamp}', '${resBlockHeight.rows[0].block_number}');`; try { - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[32mAdding account ${accountsInfo[key].accountIndex} [${key}]\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Active Accounts - \x1b[32mAdding account ${accountsInfo[key].accountIndex} [${key}]\x1b[0m`, + ); await pool.query(sql); } catch (error) { - console.log(`[PolkaStats backend v3] - Active Accounts - \x1b[31mError adding new account ${key}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Active Accounts - \x1b[31mError adding new account ${key}\x1b[0m`, + ); } - } + } } } @@ -59,5 +86,5 @@ module.exports = { () => module.exports.activeAccounts(api, pool, config), config.pollingTime, ); - } -} \ No 
newline at end of file + }, +}; diff --git a/lib/crawlers/blockHarvester.js b/lib/crawlers/blockHarvester.js index d4c8aab6..fdf5386f 100644 --- a/lib/crawlers/blockHarvester.js +++ b/lib/crawlers/blockHarvester.js @@ -1,9 +1,10 @@ -// @ts-check const { shortHash } = require('../utils.js'); module.exports = { - blockHarvester: async function (api, pool, config) { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting block harvester...\x1b[0m`); + async blockHarvester(api, pool, config) { + console.log( + `[PolkaStats backend v3] - \x1b[32mStarting block harvester...\x1b[0m`, + ); // Start execution const startTime = new Date().getTime(); @@ -36,31 +37,45 @@ module.exports = { gap_end DESC `; const res = await pool.query(sqlSelect); - + for (let i = 0; i < res.rows.length; i++) { // Quick fix for gap 0-0 error if (!(res.rows[i].gap_start == 0 && res.rows[i].gap_end == 0)) { - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mDetected gap! Harvesting blocks from #${res.rows[i].gap_end} to #${res.rows[i].gap_start}\x1b[0m`); - await module.exports.harvestBlocks(api, pool, parseInt(res.rows[i].gap_start), parseInt(res.rows[i].gap_end)); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[32mDetected gap! Harvesting blocks from #${res.rows[i].gap_end} to #${res.rows[i].gap_start}\x1b[0m`, + ); + await module.exports.harvestBlocks( + api, + pool, + parseInt(res.rows[i].gap_start), + parseInt(res.rows[i].gap_end), + ); } } - + // Execution end time const endTime = new Date().getTime(); - - // + + // // Log execution time // - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mAdded ${addedBlocks} blocks in ${((endTime - startTime) / 1000).toFixed(0)}s\x1b[0m`); - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mNext execution in 60m...\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[32mAdded ${addedBlocks} blocks in ${( + (endTime - startTime) / + 1000 + ).toFixed(0)}s\x1b[0m`, + ); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[32mNext execution in 60m...\x1b[0m`, + ); setTimeout( () => module.exports.blockHarvester(api, pool, config), config.pollingTime, ); }, - harvestBlocks: async function(api, pool, startBlock, endBlock) { - let addedBlocks = 0; + + async harvestBlocks(api, pool, startBlock, endBlock) { while (endBlock >= startBlock) { // Start execution const startTime = new Date().getTime(); @@ -70,7 +85,7 @@ module.exports = { const extendedHeader = await api.derive.chain.getHeader(blockHash); // Get block parent hash const parentHash = extendedHeader.parentHash; - + // Get block extrinsics root const extrinsicsRoot = extendedHeader.extrinsicsRoot; // Get block state root @@ -83,16 +98,17 @@ module.exports = { try { await pool.query(sqlDelete); } catch (error) { - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError deleting events for block #${endBlock}: ${error}, sql: ${sqlDelete}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[31mError deleting events for block #${endBlock}: ${error}, sql: ${sqlDelete}\x1b[0m`, + ); } // Loop through the Vec - blockEvents.forEach( async (record, index) => { - + blockEvents.forEach(async (record, index) => { // Extract the phase and event const { event, phase } = record; // console.log(JSON.stringify(record, null, 2)); - + // // TODO: Update counters in block table: // @@ -108,8 +124,7 @@ module.exports = { // new_sessions // - const sqlInsert = - `INSERT INTO event ( + const sqlInsert = `INSERT 
INTO event ( block_number, event_index, section, @@ -126,9 +141,13 @@ module.exports = { );`; try { await pool.query(sqlInsert); - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32m=> Adding event #${endBlock}-${index} ${event.section} => ${event.method}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[32m=> Adding event #${endBlock}-${index} ${event.section} => ${event.method}\x1b[0m`, + ); } catch (error) { - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError adding event #${endBlock}-${index}: ${error}, sql: ${sqlInsert}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[31mError adding event #${endBlock}-${index}: ${error}, sql: ${sqlInsert}\x1b[0m`, + ); } }); // Get session info for the block @@ -136,16 +155,23 @@ module.exports = { const currentSlot = await api.query.babe.currentSlot.at(blockHash); const epochIndex = await api.query.babe.epochIndex.at(blockHash); const genesisSlot = await api.query.babe.genesisSlot.at(blockHash); - const currentEraStartSessionIndex = await api.query.staking.currentEraStartSessionIndex.at(blockHash); + const currentEraStartSessionIndex = await api.query.staking.currentEraStartSessionIndex.at( + blockHash, + ); const currentEra = await api.query.staking.currentEra.at(blockHash); - const validatorCount = await api.query.staking.validatorCount.at(blockHash); + const validatorCount = await api.query.staking.validatorCount.at( + blockHash, + ); const epochDuration = api.consts.babe.epochDuration; const sessionsPerEra = api.consts.staking.sessionsPerEra; const eraLength = epochDuration.mul(sessionsPerEra); const epochStartSlot = epochIndex.mul(epochDuration).add(genesisSlot); const sessionProgress = currentSlot.sub(epochStartSlot); - const eraProgress = currentIndex.sub(currentEraStartSessionIndex).mul(epochDuration).add(sessionProgress); - + const eraProgress = currentIndex + .sub(currentEraStartSessionIndex) + .mul(epochDuration) + .add(sessionProgress); + // Get block author const blockAuthor = extendedHeader.author; // Get block author identity display name @@ -161,11 +187,12 @@ module.exports = { try { await pool.query(sqlDelete); } catch (error) { - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError deleting events for block #${endBlock}: ${error}, sql: ${sqlDelete}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[31mError deleting events for block #${endBlock}: ${error}, sql: ${sqlDelete}\x1b[0m`, + ); } - const sqlInsert = - `INSERT INTO block ( + const sqlInsert = `INSERT INTO block ( block_number, block_author, block_author_name, @@ -209,12 +236,18 @@ module.exports = { try { await pool.query(sqlInsert); const endTime = new Date().getTime(); - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[32mAdded block #${endBlock} (${shortHash(blockHash.toString())}) in ${((endTime - startTime) / 1000).toFixed(3)}s\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[32mAdded block #${endBlock} (${shortHash( + blockHash.toString(), + )}) in ${((endTime - startTime) / 1000).toFixed(3)}s\x1b[0m`, + ); } catch (error) { - console.log(`[PolkaStats backend v3] - Block harvester - \x1b[31mError adding block #${endBlock}: ${error.error}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block harvester - \x1b[31mError adding block #${endBlock}: ${error.error}\x1b[0m`, + ); } + endBlock--; - addedBlocks++; } - } -} \ No newline at end of file + }, +}; diff --git 
a/lib/crawlers/blockListener.js b/lib/crawlers/blockListener.js index 34a9596d..e66ed738 100644 --- a/lib/crawlers/blockListener.js +++ b/lib/crawlers/blockListener.js @@ -1,13 +1,13 @@ -// @ts-check const { shortHash } = require('../utils.js'); module.exports = { - blockListener: async function (api, pool, _config) { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting block listener...\x1b[0m`); + async blockListener(api, pool, _config) { + console.log( + `[PolkaStats backend v3] - \x1b[32mStarting block listener...\x1b[0m`, + ); // Subscribe to new blocks - await api.rpc.chain.subscribeNewHeads(async (header) => { - + await api.rpc.chain.subscribeNewHeads(async header => { // Get block number const blockNumber = header.number.toNumber(); @@ -19,7 +19,7 @@ module.exports = { // Get block parent hash const parentHash = header.parentHash; - + // Get block extrinsics root const extrinsicsRoot = header.extrinsicsRoot; @@ -44,7 +44,9 @@ module.exports = { const res = await pool.query(sqlSelect); if (res.rows.length > 0) { // Chain reorganization detected! We need to update block_author, block_hash and state_root - console.log(`[PolkaStats backend v3] - Block listener - \x1b[32mDetected chain reorganization at block #${blockNumber}, updating author, author name, hash and state root\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block listener - \x1b[32mDetected chain reorganization at block #${blockNumber}, updating author, author name, hash and state root\x1b[0m`, + ); // Get block author const blockAuthor = extendedHeader.author; @@ -53,16 +55,17 @@ module.exports = { const blockAuthorIdentity = await api.derive.accounts.info(blockAuthor); const blockAuthorName = blockAuthorIdentity.identity.display || ``; - const sqlUpdate = - `UPDATE block SET block_author = '${blockAuthor}', block_author_name = '${blockAuthorName}', block_hash = '${blockHash}', state_root = '${stateRoot}' WHERE block_number = '${blockNumber}'`; - const res = await pool.query(sqlUpdate); - + const sqlUpdate = `UPDATE block SET block_author = '${blockAuthor}', block_author_name = '${blockAuthorName}', block_hash = '${blockHash}', state_root = '${stateRoot}' WHERE block_number = '${blockNumber}'`; + await pool.query(sqlUpdate); } else { // Store new block - console.log(`[PolkaStats backend v3] - Block listener - \x1b[32mAdding block #${blockNumber} (${shortHash(blockHash.toString())})\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block listener - \x1b[32mAdding block #${blockNumber} (${shortHash( + blockHash.toString(), + )})\x1b[0m`, + ); const timestamp = new Date().getTime(); - const sqlInsert = - `INSERT INTO block ( + const sqlInsert = `INSERT INTO block ( block_number, block_author, block_author_name, @@ -107,17 +110,19 @@ module.exports = { try { await pool.query(sqlInsert); } catch (error) { - console.log(`[PolkaStats backend v3] - Block listener - \x1b[31mError: ${error}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block listener - \x1b[31mError: ${error}\x1b[0m`, + ); } // Get block events const blockEvents = await api.query.system.events.at(blockHash); // Loop through the Vec - blockEvents.forEach( async (record, index) => { + blockEvents.forEach(async (record, index) => { // Extract the phase and event const { event, phase } = record; - + // // TODO: Update counters in block table: // @@ -132,9 +137,8 @@ module.exports = { // new_contracts // new_sessions // - - const sqlInsert = - `INSERT INTO event ( + + const sqlInsert = `INSERT INTO event ( block_number, event_index, section, @@ 
-151,13 +155,16 @@ module.exports = { );`; try { await pool.query(sqlInsert); - console.log(`[PolkaStats backend v3] - Block listener - \x1b[32m=> Adding event #${blockNumber}-${index} ${event.section} => ${event.method}\x1b[0m`); - + console.log( + `[PolkaStats backend v3] - Block listener - \x1b[32m=> Adding event #${blockNumber}-${index} ${event.section} => ${event.method}\x1b[0m`, + ); } catch (error) { - console.log(`[PolkaStats backend v3] - Block listener - \x1b[31mError adding event #${blockNumber}-${index}: ${error}, sql: ${sqlInsert}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Block listener - \x1b[31mError adding event #${blockNumber}-${index}: ${error}, sql: ${sqlInsert}\x1b[0m`, + ); } }); } }); - } -} \ No newline at end of file + }, +}; diff --git a/lib/crawlers/phragmen.js b/lib/crawlers/phragmen.js index a491f088..d9ab2019 100644 --- a/lib/crawlers/phragmen.js +++ b/lib/crawlers/phragmen.js @@ -1,4 +1,3 @@ -// @ts-check const { spawnSync } = require('child_process'); const fs = require('fs'); @@ -33,7 +32,7 @@ async function phragmen(api, pool, configTemplate) { const [blockHeight, validatorCount, minimumValidatorCount] = await promises; const phragmenArgs = buildPhragmenArgs( - config.wsProviderUrl, + api.wsProviderUrl, config.phragmenOutputDir, validatorCount, minimumValidatorCount, diff --git a/lib/crawlers/rewards.js b/lib/crawlers/rewards.js index 646a0d65..e6cd337c 100644 --- a/lib/crawlers/rewards.js +++ b/lib/crawlers/rewards.js @@ -1,121 +1,151 @@ -// @ts-check - const axios = require('axios'); -const {BigNumber} = require('bignumber.js'); +const { BigNumber } = require('bignumber.js'); let crawlerIsRunning = false; module.exports = { - rewards: async function (api, pool, _config) { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting rewards crawler...\x1b[0m`); + async rewards(api, pool, _config) { + console.log( + `[PolkaStats backend v3] - \x1b[32mStarting rewards crawler...\x1b[0m`, + ); // Subscribe to new blocks - await api.rpc.chain.subscribeNewHeads( async (header) => { - + await api.rpc.chain.subscribeNewHeads(async header => { let currentDBIndex; - + // Get block number const blockNumber = header.number.toNumber(); // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mNew block #${blockNumber}\x1b[0m`); - + // Get last index stored in DB const sqlSelect = `SELECT session_index FROM validator_staking ORDER BY session_index DESC LIMIT 1`; const res = await pool.query(sqlSelect); if (res.rows.length > 0) { - currentDBIndex = parseInt(res.rows[0]["session_index"]); + currentDBIndex = parseInt(res.rows[0]['session_index']); // console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[33mLast session index stored in DB is #${currentDBIndex}\x1b[0m`); } else { currentDBIndex = 0; if (!crawlerIsRunning) { - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[33mFirst execution, no session index found in DB!\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[33mFirst execution, no session index found in DB!\x1b[0m`, + ); } } - + // Get current session index const session = await api.derive.session.info(); const currentIndex = session.currentIndex.toNumber(); // console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[32mCurrent session index is #${currentIndex}\x1b[0m`); - + if (currentIndex > currentDBIndex) { if (!crawlerIsRunning) { - await module.exports.storeRewardsInfo(api, pool, blockNumber, currentIndex); + await module.exports.storeRewardsInfo( + api, + pool, + blockNumber, + 
currentIndex, + ); } } }); }, - storeRewardsInfo: async function (api, pool, blockNumber, currentIndex) { + + async storeRewardsInfo(api, pool, blockNumber, currentIndex) { crawlerIsRunning = true; - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[33mStoring rewards at block #${blockNumber} (session #${currentIndex})\x1b[0m`); - - // Fetch staking information of elected validators - const electedInfo = await api.derive.staking.electedInfo(); - - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[33mElected info: ${JSON.stringify(electedInfo)})\x1b[0m`); - - // Fetch last reward events from PolkaScan - const response = await axios.default.get(`https://api-01.polkascan.io/kusama/api/v1/event?&filter[module_id]=staking&filter[event_id]=Reward`); - - const rewardEvents = response.data.data; - - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[33mLast rewards: ${JSON.stringify(rewardEvents)})\x1b[0m`); - - // Fetch rewards event info, retrieve hash, era points and - // elected validators for all the blocks at the end of the era - let rewards = []; - rewards = await Promise.all( - rewardEvents.map(async event => { - let reward_block_id = event.attributes.block_id; - let value = event.attributes.attributes[0].value; - let end_era_block_id = event.attributes.block_id - 1; - let hash = await api.rpc.chain.getBlockHash(end_era_block_id); - let eraPoints = await api.query.staking.currentEraPointsEarned.at(hash.toString()); - let endEraValidatorList = await api.query.staking.currentElected.at(hash.toString()); - const session_index = await api.query.session.currentIndex.at(hash); - - return { - session_index, - reward_block_id, - reward: value, - end_era_block_id, - end_era_block_hash: hash.toString(), - points: eraPoints, - elected_validators: endEraValidatorList + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[33mStoring rewards at block #${blockNumber} (session #${currentIndex})\x1b[0m`, + ); + + // Fetch staking information of elected validators + const electedInfo = await api.derive.staking.electedInfo(); + + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[33mElected info: ${JSON.stringify( + electedInfo, + )})\x1b[0m`, + ); + + // Fetch last reward events from PolkaScan + const response = await axios.default.get( + `https://api-01.polkascan.io/kusama/api/v1/event?&filter[module_id]=staking&filter[event_id]=Reward`, + ); + + const rewardEvents = response.data.data; + + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[33mLast rewards: ${JSON.stringify( + rewardEvents, + )})\x1b[0m`, + ); + + // Fetch rewards event info, retrieve hash, era points and + // elected validators for all the blocks at the end of the era + let rewards = []; + rewards = await Promise.all( + rewardEvents.map(async event => { + let reward_block_id = event.attributes.block_id; + let value = event.attributes.attributes[0].value; + let end_era_block_id = event.attributes.block_id - 1; + let hash = await api.rpc.chain.getBlockHash(end_era_block_id); + let eraPoints = await api.query.staking.currentEraPointsEarned.at( + hash.toString(), + ); + let endEraValidatorList = await api.query.staking.currentElected.at( + hash.toString(), + ); + const session_index = await api.query.session.currentIndex.at(hash); + + return { + session_index, + reward_block_id, + reward: value, + end_era_block_id, + end_era_block_hash: hash.toString(), + points: eraPoints, + elected_validators: endEraValidatorList, + }; + }), + ); + + // Fetch commission for current 
elected validators + let allRewards = await Promise.all( + electedInfo.currentElected.map(async address => { + const stakeInfo = electedInfo.info.find( + val => val.stashId.toString() === address.toString(), + ); + const commission = + stakeInfo.validatorPrefs.commission.toNumber() / 10 ** 7; + + let eraRewards = []; + rewards.forEach(reward => { + let era_points = 0; + let era_points_percent = 0; + let total_era_points = reward.points.total.toNumber(); + let index = reward.elected_validators.indexOf(address); + + if (index >= 0) { + era_points = reward.points.individual[index].toNumber(); + era_points_percent = (era_points * 100) / total_era_points; } - }) - ); - - // Fetch commission for current elected validators - let allRewards = await Promise.all( - electedInfo.currentElected.map(async address => { - - const stakeInfo = electedInfo.info - .find( - val => val.stashId.toString() === address.toString() - ) - const commission = stakeInfo.validatorPrefs.commission.toNumber() / 10 ** 7; - - let eraRewards = [] - rewards.forEach(reward => { - let era_points = 0; - let era_points_percent = 0; - let total_era_points = reward.points.total.toNumber(); - let index = reward.elected_validators.indexOf(address); - - if (index >= 0) { - era_points = reward.points.individual[index].toNumber() ; - era_points_percent = (era_points * 100) / total_era_points; - } - - let pool_reward_with_commission = ((reward.reward / 100) * era_points_percent) / 10 ** 12; - let pool_reward = (1 - commission / 100) * pool_reward_with_commission; - let total_stake = new BigNumber(Number(stakeInfo.stakers.total) / 10 ** 12); - // Daily earning logic for frontend - const stake_amount = new BigNumber(1000); - const user_stake_fraction = stake_amount.div(total_stake.plus(stake_amount)); - // Per era - const estimated_payout = user_stake_fraction.multipliedBy(pool_reward_with_commission); - - eraRewards.push({ + + let pool_reward_with_commission = + ((reward.reward / 100) * era_points_percent) / 10 ** 12; + let pool_reward = + (1 - commission / 100) * pool_reward_with_commission; + let total_stake = new BigNumber( + Number(stakeInfo.stakers.total) / 10 ** 12, + ); + // Daily earning logic for frontend + const stake_amount = new BigNumber(1000); + const user_stake_fraction = stake_amount.div( + total_stake.plus(stake_amount), + ); + // Per era + const estimated_payout = user_stake_fraction.multipliedBy( + pool_reward_with_commission, + ); + + eraRewards.push({ reward_session: reward.session_index, reward_block_id: reward.reward_block_id, reward_amount: reward.reward, @@ -125,33 +155,47 @@ module.exports = { pool_reward_with_commission, pool_reward, total_stake, - estimated_payout + estimated_payout, }); - }); return { stash_id: address, commission, eraRewards, - stake_info: stakeInfo - } - - }) + stake_info: stakeInfo, + }; + }), ); if (allRewards) { - allRewards.forEach( async reward => { + allRewards.forEach(async reward => { let sqlInsert = `INSERT INTO rewards (block_number, session_index, stash_id, era_rewards, commission, stake_info, timestamp) - VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify(reward.stash_id)}', '${JSON.stringify(reward.eraRewards.join(''))}', '${reward.commission}', '${JSON.stringify(reward.stake_info)}', extract(epoch from now()));`; + VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify( + reward.stash_id, + )}', '${JSON.stringify(reward.eraRewards.join(''))}', '${ + reward.commission + }', '${JSON.stringify( + reward.stake_info, + )}', extract(epoch from now()));`; try { 
const res = await pool.query(sqlInsert); - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[33mResponse from Database is ${JSON.stringify(res)}]`) + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[33mResponse from Database is ${JSON.stringify( + res, + )}]`, + ); } catch (error) { - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[31mSQL: ${sqlInsert}\x1b[0m`); - console.log(`[PolkaStats backend v3] - Rewards crawler - \x1b[31mERROR: ${JSON.stringify(error)}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[31mSQL: ${sqlInsert}\x1b[0m`, + ); + console.log( + `[PolkaStats backend v3] - Rewards crawler - \x1b[31mERROR: ${JSON.stringify( + error, + )}\x1b[0m`, + ); } - }) + }); } - } -} \ No newline at end of file + }, +}; diff --git a/lib/crawlers/staking.js b/lib/crawlers/staking.js index fa40c2ef..ea157f21 100644 --- a/lib/crawlers/staking.js +++ b/lib/crawlers/staking.js @@ -1,138 +1,182 @@ -const {BigNumber} = require('bignumber.js'); +const { BigNumber } = require('bignumber.js'); -// @ts-check let crawlerIsRunning = false; module.exports = { - staking: async function (api, pool, _config) { - console.log(`[PolkaStats backend v3] - \x1b[32mStarting staking crawler...\x1b[0m`); - - // Subscribe to new blocks - await api.rpc.chain.subscribeNewHeads(async (header) => { + async staking(api, pool, _config) { + console.log( + `[PolkaStats backend v3] - \x1b[32mStarting staking crawler...\x1b[0m`, + ); + // Subscribe to new blocks + await api.rpc.chain.subscribeNewHeads(async header => { let currentDBIndex; - + // Get block number const blockNumber = header.number.toNumber(); // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mNew block #${blockNumber}\x1b[0m`); - + // Get last index stored in DB const sqlSelect = `SELECT session_index FROM validator_staking ORDER BY session_index DESC LIMIT 1`; const res = await pool.query(sqlSelect); if (res.rows.length > 0) { - currentDBIndex = parseInt(res.rows[0]["session_index"]); + currentDBIndex = parseInt(res.rows[0]['session_index']); // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mLast session index stored in DB is #${currentDBIndex}\x1b[0m`); } else { currentDBIndex = 0; if (!crawlerIsRunning) { - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mFirst execution, no session index found in DB!\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mFirst execution, no session index found in DB!\x1b[0m`, + ); } } - + // Get current session info const sessionInfo = await api.derive.session.info(); - + if (sessionInfo.currentIndex > currentDBIndex) { if (!crawlerIsRunning) { - await module.exports.storeStakingInfo(api, pool, blockNumber, sessionInfo); + await module.exports.storeStakingInfo( + api, + pool, + blockNumber, + sessionInfo, + ); } } }); }, - storeStakingInfo: async function (api, pool, blockNumber, sessionInfo) { + + async storeStakingInfo(api, pool, blockNumber, sessionInfo) { crawlerIsRunning = true; - + const currentIndex = sessionInfo.currentIndex.toNumber(); // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[32mCurrent session index is #${currentIndex}\x1b[0m`); - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mStoring validators staking info for at block #${blockNumber} (session #${currentIndex})\x1b[0m`); - + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mStoring validators staking info for at block #${blockNumber} (session 
#${currentIndex})\x1b[0m`, + ); + // // Get active validators, imOnline data, current elected and current era points earned // - const [validators, imOnline, currentElected, currentEraPointsEarned] = await Promise.all([ + const [ + validators, + imOnline, + currentElected, + currentEraPointsEarned, + ] = await Promise.all([ api.query.session.validators(), api.derive.imOnline.receivedHeartbeats(), api.query.staking.currentElected(), - api.query.staking.currentEraPointsEarned() + api.query.staking.currentEraPointsEarned(), ]); - + // // Map validator authorityId to staking info object // const validatorStaking = await Promise.all( - validators.map(authorityId => api.derive.staking.account(authorityId)) + validators.map(authorityId => api.derive.staking.account(authorityId)), ); - + // // Add hex representation of sessionId[] and nextSessionId[] // validatorStaking.forEach(validator => { - validator.sessionIdHex = validator.sessionIds.length !== 0 ? validator.sessionIds.toHex() : ``; - validator.nextSessionIdHex = validator.nextSessionIds.length !== 0 ? validator.nextSessionIds.toHex() : ``; - }) - + validator.sessionIdHex = + validator.sessionIds.length !== 0 ? validator.sessionIds.toHex() : ``; + validator.nextSessionIdHex = + validator.nextSessionIds.length !== 0 + ? validator.nextSessionIds.toHex() + : ``; + }); + // // Add imOnline property to validator object // - validatorStaking.forEach(function (validator) { + validatorStaking.forEach(function(validator) { if (imOnline[validator.accountId]) { validator.imOnline = imOnline[validator.accountId]; } }, imOnline); - + // // Add current elected and earned era points to validator object // - for(let i = 0; i < validatorStaking.length; i++) { + for (let i = 0; i < validatorStaking.length; i++) { let validator = validatorStaking[i]; if (Number.isInteger(currentElected.indexOf(validator.accountId))) { validator.currentElected = true; } else { validator.currentElected = false; } - if (currentEraPointsEarned.individual[currentElected.indexOf(validator.accountId)]) { - validator.currentEraPointsEarned = currentEraPointsEarned.individual[currentElected.indexOf(validator.accountId)]; + + if ( + currentEraPointsEarned.individual[ + currentElected.indexOf(validator.accountId) + ] + ) { + validator.currentEraPointsEarned = + currentEraPointsEarned.individual[ + currentElected.indexOf(validator.accountId) + ]; } } - + if (validatorStaking) { - let sqlInsert = `INSERT INTO validator_staking (block_number, session_index, json, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify(validatorStaking)}', extract(epoch from now()));`; + let sqlInsert = `INSERT INTO validator_staking (block_number, session_index, json, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify( + validatorStaking, + )}', extract(epoch from now()));`; try { await pool.query(sqlInsert); } catch (error) { // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mSQL: ${sqlInsert}\x1b[0m`); - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify(error)}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify( + error, + )}\x1b[0m`, + ); } } // // Populate graph data tables // - validatorStaking.forEach(async validator => { - // populate validator_bonded table - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_bonded table\x1b[0m`); - let sql = `INSERT INTO validator_bonded (block_number, session_index, account_id, 
amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${BigNumber(validator.exposure.total).toString(10)}', extract(epoch from now()));`; + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_bonded table\x1b[0m`, + ); + let sql = `INSERT INTO validator_bonded (block_number, session_index, account_id, amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${BigNumber( + validator.exposure.total, + ).toString(10)}', extract(epoch from now()));`; await pool.query(sql); - + // populate validator_selfbonded table - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_selfbonded table\x1b[0m`); - sql = `INSERT INTO validator_selfbonded (block_number, session_index, account_id, amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${BigNumber(validator.exposure.own).toString(10)}', extract(epoch from now()));`; + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_selfbonded table\x1b[0m`, + ); + sql = `INSERT INTO validator_selfbonded (block_number, session_index, account_id, amount, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${BigNumber( + validator.exposure.own, + ).toString(10)}', extract(epoch from now()));`; await pool.query(sql); // populate validator_num_nominators table - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_num_nominators table\x1b[0m`); - sql = `INSERT INTO validator_num_nominators (block_number, session_index, account_id, nominators, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${validator.exposure.others.length}', extract(epoch from now()));`; + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_num_nominators table\x1b[0m`, + ); + sql = `INSERT INTO validator_num_nominators (block_number, session_index, account_id, nominators, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', '${ + validator.exposure.others.length + }', extract(epoch from now()));`; await pool.query(sql); // populate validator_active table - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_num_nominators table\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mPopulating validator_num_nominators table\x1b[0m`, + ); sql = `INSERT INTO validator_num_nominators (block_number, session_index, account_id, active, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${validator.accountId.toString()}', 'true', extract(epoch from now()));`; await pool.query(sql); - - }) + }); // // Populate validator_era_points table @@ -156,45 +200,53 @@ module.exports = { // await pool.query(sql); // } // }) - + // // Fetch intention validators // - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[33mStoring intentions staking info at block #${blockNumber} (session #${currentIndex})\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[33mStoring intentions staking info at block #${blockNumber} (session #${currentIndex})\x1b[0m`, + ); const intentionValidators = await api.query.staking.validators(); const intentions = intentionValidators[0]; - + // // Map validator authorityId to staking info object // const intentionStaking = await Promise.all( - 
intentions.map(authorityId => api.derive.staking.account(authorityId)) + intentions.map(authorityId => api.derive.staking.account(authorityId)), ); - + // // Add hex representation of sessionId[] and nextSessionId[] // - for(let i = 0; i < intentionStaking.length; i++) { + for (let i = 0; i < intentionStaking.length; i++) { let intention = intentionStaking[i]; if (intention.sessionIds.length > 0) { intention.sessionIdHex = intention.sessionIds.toHex(); } + if (intention.nextSessionIds.length > 0) { intention.nextSessionIdHex = intention.nextSessionIds.toHex(); } } - + if (intentionStaking) { - let sqlInsert = `INSERT INTO intention_staking (block_number, session_index, json, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify(intentionStaking)}', extract(epoch from now()));`; + let sqlInsert = `INSERT INTO intention_staking (block_number, session_index, json, timestamp) VALUES ('${blockNumber}', '${currentIndex}', '${JSON.stringify( + intentionStaking, + )}', extract(epoch from now()));`; try { await pool.query(sqlInsert); } catch (error) { // console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mSQL: ${sqlInsert}\x1b[0m`); - console.log(`[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify(error)}\x1b[0m`); + console.log( + `[PolkaStats backend v3] - Staking crawler - \x1b[31mERROR: ${JSON.stringify( + error, + )}\x1b[0m`, + ); } } - + crawlerIsRunning = false; - } -} - \ No newline at end of file + }, +}; diff --git a/lib/utils.js b/lib/utils.js index 10ca9f00..3654a364 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -1,13 +1,15 @@ module.exports = { - formatNumber: function (number) { - return (number.toString()).replace(/(\d)(?=(\d{3})+(?!\d))/g, '$1,'); + formatNumber(number) { + return number.toString().replace(/(\d)(?=(\d{3})+(?!\d))/g, '$1,'); }, - shortHash: function (hash) { + + shortHash(hash) { return `${hash.substr(0, 6)}…${hash.substr(hash.length - 5, 4)}`; }, - wait: async function (ms) { + + wait(ms) { return new Promise(resolve => { setTimeout(resolve, ms); }); - } -}; \ No newline at end of file + }, +}; diff --git a/package.json b/package.json index db5a88ea..8037c087 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,8 @@ "docker:postgres": "docker-compose -f docker/polkastats-backend/docker-compose.yml up -d postgres", "docker:graphql": "docker-compose -f docker/polkastats-backend/docker-compose.yml up -d graphql-engine", "docker:crawler": "docker-compose -f docker/polkastats-backend/docker-compose.yml up -d crawler", - "docker:phragmen": "docker-compose -f docker/polkastats-backend/docker-compose.yml up -d phragmen" + "docker:phragmen": "docker-compose -f docker/polkastats-backend/docker-compose.yml up -d phragmen", + "lint": "eslint '**/*.js'" }, "repository": { "type": "git", @@ -35,5 +36,10 @@ "axios": "^0.19.2", "bignumber.js": "^9.0.0", "pg": "^7.18.1" + }, + "devDependencies": { + "eslint": "^6.8.0", + "eslint-config-prettier": "^6.10.0", + "prettier": "^1.19.1" } } From 4fb125694513c2086802fd942b10f0da2cc6da51 Mon Sep 17 00:00:00 2001 From: Bitcoinera Date: Thu, 19 Mar 2020 01:24:31 +0100 Subject: [PATCH 4/4] remove trailing commas --- .prettierrc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.prettierrc b/.prettierrc index 4ff2b8b3..dc2fb828 100644 --- a/.prettierrc +++ b/.prettierrc @@ -1,4 +1,3 @@ { - "singleQuote": true, - "trailingComma": "all" + "singleQuote": true } \ No newline at end of file
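
A note on how the PATCH 1/4 gating is consumed: runCrawlers() starts a crawler only when its section of backend.config.js exists and has enabled: true, so individual crawlers can be switched off from configuration alone. Below is a minimal sketch of such an override, assuming only the config shape introduced in that patch; the light-profile file name is hypothetical, and Object.assign is used instead of object spread to stay within the ecmaVersion 2017 parser option added in PATCH 3/4:

// backend.config.light.js (hypothetical): reuse the stock configuration but
// keep the two heaviest crawlers switched off; runCrawlers() simply skips
// any section whose `enabled` flag is falsy.
const config = require('./backend.config.js');

module.exports = Object.assign({}, config, {
  blockHarvester: Object.assign({}, config.blockHarvester, { enabled: false }),
  phragmen: Object.assign({}, config.phragmen, { enabled: false }),
});

All pollingTime values are in milliseconds (5 * 60 * 1000 is five minutes), and the node endpoint can be changed without touching the file through the WS_PROVIDER_URL environment variable introduced in the same patch.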
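
On the PATCH 2/4 fix: the pre-patch calls passed the result of invoking the crawler to setTimeout rather than a callback. A minimal sketch of the difference, where crawl is a stand-in function rather than anything from this repository:

async function crawl(name) {
  console.log(`crawling ${name}`);
}

// Pre-patch shape: crawl('accounts') executes right away and setTimeout
// receives the Promise it returns instead of a function, so no delayed run
// is ever scheduled (recent Node versions throw ERR_INVALID_CALLBACK here).
// setTimeout(crawl('accounts'), 60000);

// Patched shape: the arrow function defers the call until the timer fires.
setTimeout(() => crawl('accounts'), 60000);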
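
Taken together, the patches converge on a single crawler shape: BackendV3 starts each enabled crawler once, and a polling crawler re-arms its own timer after the current run completes (as phragmen.js and the PATCH 2/4 fixes do), so runs never overlap. A sketch of that shape under the same config contract; myCrawler and its config section are hypothetical, not part of the repository:

async function myCrawler(api, pool, config) {
  console.log('[PolkaStats backend v3] - Running my crawler...');

  // ... read from the Polkadot `api`, write rows to the Postgres `pool` ...

  // Re-schedule only after this run has finished, mirroring phragmen.js.
  setTimeout(() => myCrawler(api, pool, config), config.pollingTime);
}

module.exports = { myCrawler };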