diff --git a/db/migrations/008.do.create-participants-table.sql b/db/migrations/008.do.create-participants-table.sql new file mode 100644 index 0000000..acad095 --- /dev/null +++ b/db/migrations/008.do.create-participants-table.sql @@ -0,0 +1,4 @@ +CREATE TABLE participants ( + id SERIAL PRIMARY KEY, + participant_address TEXT NOT NULL UNIQUE +); diff --git a/db/migrations/009.do.backfill-participants.sql b/db/migrations/009.do.backfill-participants.sql new file mode 100644 index 0000000..688e5da --- /dev/null +++ b/db/migrations/009.do.backfill-participants.sql @@ -0,0 +1,9 @@ +-- Backfill existing participant addresses from daily_scheduled_rewards +INSERT INTO participants (participant_address) +SELECT DISTINCT participant_address FROM daily_scheduled_rewards +ON CONFLICT (participant_address) DO NOTHING; + +-- Backfill existing participant addresses from daily_reward_transfers +INSERT INTO participants (participant_address) +SELECT DISTINCT to_address FROM daily_reward_transfers +ON CONFLICT (participant_address) DO NOTHING; \ No newline at end of file diff --git a/db/migrations/010.do.migrate-foreign-keys.sql b/db/migrations/010.do.migrate-foreign-keys.sql new file mode 100644 index 0000000..90eb9db --- /dev/null +++ b/db/migrations/010.do.migrate-foreign-keys.sql @@ -0,0 +1,46 @@ +-- Step 1: Add foreign key columns +ALTER TABLE daily_scheduled_rewards ADD COLUMN participant_id INT; +ALTER TABLE daily_reward_transfers ADD COLUMN to_address_id INT; + +-- Step 2: Populate the new foreign key columns +UPDATE daily_scheduled_rewards dsr +SET participant_id = p.id +FROM participants p +WHERE dsr.participant_address = p.participant_address; + +UPDATE daily_reward_transfers drt +SET to_address_id = p.id +FROM participants p +WHERE drt.to_address = p.participant_address; + +-- Step 3: Replace Primary Keys +ALTER TABLE daily_scheduled_rewards +DROP CONSTRAINT daily_scheduled_rewards_pkey; +ALTER TABLE daily_scheduled_rewards +ADD PRIMARY KEY (day, participant_id); + 
+ALTER TABLE daily_reward_transfers +DROP CONSTRAINT daily_reward_transfers_pkey; +ALTER TABLE daily_reward_transfers +ADD PRIMARY KEY (day, to_address_id); + +-- Step 4: Add Foreign Key Constraints +ALTER TABLE daily_scheduled_rewards +ADD CONSTRAINT fk_dsr_participant FOREIGN KEY (participant_id) +REFERENCES participants(id) ON DELETE CASCADE; + +ALTER TABLE daily_reward_transfers +ADD CONSTRAINT fk_drt_to_address FOREIGN KEY (to_address_id) +REFERENCES participants(id) ON DELETE CASCADE; + +-- Step 5: Enforce NOT NULL Constraint +ALTER TABLE daily_scheduled_rewards ALTER COLUMN participant_id SET NOT NULL; +ALTER TABLE daily_reward_transfers ALTER COLUMN to_address_id SET NOT NULL; + +-- Step 6: Drop old indexes referencing participant_address +DROP INDEX IF EXISTS daily_reward_transfers_to_address_day; +DROP INDEX IF EXISTS idx_daily_scheduled_rewards_participant_address; + +-- Step 7: Drop Old participant_address Columns (if they exist) +ALTER TABLE daily_scheduled_rewards DROP COLUMN IF EXISTS participant_address; +ALTER TABLE daily_reward_transfers DROP COLUMN IF EXISTS to_address; diff --git a/db/migrations/011.do.index-daily-scheduled-rewards.sql b/db/migrations/011.do.index-daily-scheduled-rewards.sql new file mode 100644 index 0000000..fe0b498 --- /dev/null +++ b/db/migrations/011.do.index-daily-scheduled-rewards.sql @@ -0,0 +1,2 @@ +CREATE INDEX CONCURRENTLY idx_daily_scheduled_rewards_pid_day +ON daily_scheduled_rewards (participant_id, day DESC); diff --git a/db/migrations/012.do.index-daily-reward-transfers.sql b/db/migrations/012.do.index-daily-reward-transfers.sql new file mode 100644 index 0000000..acaf8ff --- /dev/null +++ b/db/migrations/012.do.index-daily-reward-transfers.sql @@ -0,0 +1,2 @@ +CREATE INDEX CONCURRENTLY idx_daily_reward_transfers_to_address_day +ON daily_reward_transfers (to_address_id, day DESC); diff --git a/db/test-helpers.js b/db/test-helpers.js index 64bd980..0ea0081 100644 --- a/db/test-helpers.js +++ b/db/test-helpers.js 
@@ -1,20 +1,27 @@ import { mapParticipantsToIds } from '@filecoin-station/spark-evaluate/lib/platform-stats.js' /** - * @param {import('./typings.js').Queryable} pgPool + * Populate daily participants in spark_evaluate database + * + * @param {import('./typings.js').PgPoolEvaluate} pgPool * @param {string} day * @param {string[]} participantAddresses */ -export const givenDailyParticipants = async (pgPool, day, participantAddresses) => { +export const givenDailyParticipants = async ( + pgPool, + day, + participantAddresses +) => { const ids = await mapParticipantsToIds(pgPool, new Set(participantAddresses)) - await pgPool.query(` + + await pgPool.query( + ` INSERT INTO daily_participants (day, participant_id) SELECT $1 as day, UNNEST($2::INT[]) AS participant_id ON CONFLICT DO NOTHING - `, [ - day, - Array.from(ids.values()) - ]) + `, + [day, Array.from(ids.values())] + ) } /** @@ -23,12 +30,49 @@ export const givenDailyParticipants = async (pgPool, day, participantAddresses) * @param {number} count */ export const givenDailyDesktopUsers = async (pgPool, day, count) => { - await pgPool.query(` + await pgPool.query( + ` INSERT INTO daily_desktop_users (day, user_count) VALUES ($1, $2) ON CONFLICT DO NOTHING - `, [ - day, - count - ]) + `, + [day, count] + ) +} + +// Map addresses and insert into daily_scheduled_rewards +export const givenScheduledRewards = async (pgClient, day, rewardsMap) => { + const addresses = Array.from(rewardsMap.keys()) + const addressMap = await mapParticipantsToIds(pgClient, new Set(addresses)) + + for (const [address, rewards] of rewardsMap.entries()) { + const id = addressMap.get(address) + await pgClient.query( + ` + INSERT INTO daily_scheduled_rewards (day, participant_id, scheduled_rewards) + VALUES ($1, $2, $3) + `, + [day, id, rewards] + ) + } +} + +// Map address and insert into daily_reward_transfers +export const givenRewardTransfer = async ( + pgClient, + day, + address, + amount, + lastCheckedBlock = 0 +) => { + const addressMap 
= await mapParticipantsToIds(pgClient, new Set([address])) + const id = addressMap.get(address) + + await pgClient.query( + ` + INSERT INTO daily_reward_transfers (day, to_address_id, amount, last_checked_block) + VALUES ($1, $2, $3, $4) + `, + [day, id, amount, lastCheckedBlock] + ) } diff --git a/observer/lib/map-participants-to-ids.js b/observer/lib/map-participants-to-ids.js new file mode 100644 index 0000000..06b8765 --- /dev/null +++ b/observer/lib/map-participants-to-ids.js @@ -0,0 +1,73 @@ +// This is a copy of the code from spark-evaluate: +// https://github.com/CheckerNetwork/spark-evaluate/blob/7548057f3c9609c4bc52baf896b0a85d7a7f8197/lib/platform-stats.js#L154-L219 + +import assert from 'node:assert' +import createDebug from 'debug' +const debug = createDebug('spark:observer:map-participants-to-ids') + +/** + * @param {import('@filecoin-station/spark-stats-db').Queryable} pgClient + * @param {Set} participantsSet + * @returns {Promise>} A map of participant addresses to ids. + */ +export const mapParticipantsToIds = async (pgClient, participantsSet) => { + debug('Mapping participants to id, count=%s', participantsSet.size) + + /** @type {Map} */ + const participantsMap = new Map() + + // TODO: We can further optimise performance of this function by using + // an in-memory LRU cache. Our network has currently ~2k participants, + // we need ~50 bytes for each (address, id) pair, that's only ~100KB of data. + + // TODO: passing the entire list of participants as a single query parameter + // will probably not scale beyond several thousands of addresses. We will + // need to rework the queries to split large arrays into smaller batches. + + // In most rounds, we have already seen most of the participant addresses + // If we use "INSERT...ON CONFLICT", then PG increments id counter even for + // existing addresses where we end up skipping the insert. This could quickly + // exhaust the space of all 32bit integers. 
+ // Solution: query the table for known records before running the insert. + // + // Caveat: In my testing, this query was not able to leverage the (unique) + // index on participants.participant_address and performed a full table scan + // after the array grew past ~10 items. If this becomes a problem, we can + // introduce the LRU cache mentioned above. + const { rows: found } = await pgClient.query( + 'SELECT * FROM participants WHERE participant_address = ANY($1::TEXT[])', + [Array.from(participantsSet.values())] + ) + debug('Known participants count=%s', found.length) + + // eslint-disable-next-line camelcase + for (const { id, participant_address } of found) { + participantsMap.set(participant_address, id) + participantsSet.delete(participant_address) + } + + debug('New participant addresses count=%s', participantsSet.size) + + // Register the new addresses. Use "INSERT...ON CONFLICT" to handle the race condition + // where another client may have registered these addresses between our previous + // SELECT query and the next INSERT query. 
+ const newAddresses = Array.from(participantsSet.values()) + debug('Registering new participant addresses, count=%s', newAddresses.length) + const { rows: created } = await pgClient.query(` + INSERT INTO participants (participant_address) + SELECT UNNEST($1::TEXT[]) AS participant_address + ON CONFLICT(participant_address) DO UPDATE + -- this no-op update is needed to populate "RETURNING id, participant_address" + SET participant_address = EXCLUDED.participant_address + RETURNING id, participant_address + `, [ + newAddresses + ]) + + assert.strictEqual(created.length, newAddresses.length) + for (const { id, participant_address: participantAddress } of created) { + participantsMap.set(participantAddress, id) + } + + return participantsMap +} diff --git a/observer/lib/observer.js b/observer/lib/observer.js index 199ff7f..20d4555 100644 --- a/observer/lib/observer.js +++ b/observer/lib/observer.js @@ -1,6 +1,7 @@ import { updateDailyTransferStats } from './platform-stats.js' import * as Sentry from '@sentry/node' import assert from 'node:assert' +import { mapParticipantsToIds } from './map-participants-to-ids.js' /** * Observe the transfer events on the Filecoin blockchain @@ -24,12 +25,32 @@ export const observeTransferEvents = async (pgPoolStats, ieContract, provider) = const events = await ieContract.queryFilter(ieContract.filters.Transfer(), queryFromBlock) console.log(`Found ${events.length} Transfer events`) + + const filteredEvents = events.filter(isEventLog) + + // gather addresses + const addresses = new Set() + for (const event of filteredEvents) { + addresses.add(event.args.to) + } + + const addressMap = await mapParticipantsToIds(pgPoolStats, addresses) + + // handle events now that every toAddress is guaranteed an ID for (const event of events.filter(isEventLog)) { + const toAddress = event.args.to + const toAddressId = addressMap.get(toAddress) + if (!toAddressId) { + console.warn('Could not find or create participant for address:', toAddress) + 
continue + } + const transferEvent = { - toAddress: event.args.to, + toAddressId, amount: event.args.amount } - console.log('Transfer event:', transferEvent) + + // 2) Update call to accommodate `to_address_id` await updateDailyTransferStats(pgPoolStats, transferEvent, currentBlockNumber) } @@ -65,6 +86,11 @@ export const observeScheduledRewards = async (pgPools, ieContract, fetch = globa JOIN daily_participants d ON p.id = d.participant_id WHERE d.day >= now() - interval '3 days' `) + + // The query above fetched participant addresses from the spark_evaluate database + // Now we need to register those participants in the spark_stats database too + const addressToIdMap = await mapParticipantsToIds(pgPools.stats, new Set(rows.map(r => r.participant_address))) + for (const { participant_address: address } of rows) { let scheduledRewards try { @@ -79,13 +105,15 @@ export const observeScheduledRewards = async (pgPools, ieContract, fetch = globa continue } console.log('Scheduled rewards for', address, scheduledRewards) + const participantId = addressToIdMap.get(address) + await pgPools.stats.query(` INSERT INTO daily_scheduled_rewards - (day, participant_address, scheduled_rewards) + (day, participant_id, scheduled_rewards) VALUES (now(), $1, $2) - ON CONFLICT (day, participant_address) DO UPDATE SET - scheduled_rewards = EXCLUDED.scheduled_rewards - `, [address, scheduledRewards]) + ON CONFLICT (day, participant_id) DO UPDATE SET + scheduled_rewards = EXCLUDED.scheduled_rewards + `, [participantId, scheduledRewards]) } } diff --git a/observer/lib/platform-stats.js b/observer/lib/platform-stats.js index 507f415..f99a856 100644 --- a/observer/lib/platform-stats.js +++ b/observer/lib/platform-stats.js @@ -1,16 +1,18 @@ /** + * * @param {import('@filecoin-station/spark-stats-db').Queryable} pgClient * @param {Object} transferEvent - * @param {string} transferEvent.toAddress - * @param {number} transferEvent.amount + * @param {BigInt | number | string} transferEvent.amount 
+ * @param {number} transferEvent.toAddressId * @param {number} currentBlockNumber */ export const updateDailyTransferStats = async (pgClient, transferEvent, currentBlockNumber) => { await pgClient.query(` - INSERT INTO daily_reward_transfers (day, to_address, amount, last_checked_block) + INSERT INTO daily_reward_transfers + (day, to_address_id, amount, last_checked_block) VALUES (now(), $1, $2, $3) - ON CONFLICT (day, to_address) DO UPDATE SET + ON CONFLICT (day, to_address_id) DO UPDATE SET amount = daily_reward_transfers.amount + EXCLUDED.amount, last_checked_block = EXCLUDED.last_checked_block - `, [transferEvent.toAddress, transferEvent.amount, currentBlockNumber]) + `, [transferEvent.toAddressId, transferEvent.amount, currentBlockNumber]) } diff --git a/observer/test/observer.test.js b/observer/test/observer.test.js index 43dcc71..01997cb 100644 --- a/observer/test/observer.test.js +++ b/observer/test/observer.test.js @@ -31,7 +31,7 @@ describe('observer', () => { beforeEach(async () => { await pgPools.stats.query('DELETE FROM daily_reward_transfers') - + await pgPools.stats.query('DELETE FROM participants') ieContractMock = { filters: { Transfer: () => 'TransferEventFilter' @@ -42,6 +42,36 @@ describe('observer', () => { getBlockNumber: async () => 2000 } }) + it('should create a new participant if not found in the participants table', async () => { + // 1) Make the contract return an event for a new address 'address3' + ieContractMock.queryFilter = async (eventName, fromBlock) => { + return [ + { args: { to: 'address3', amount: 400 }, blockNumber: 2000 } + ] + } + + // 2) Run the observer function + const numEvents = await observeTransferEvents(pgPools.stats, ieContractMock, providerMock) + + // Should have processed 1 new event + assert.strictEqual(numEvents, 1) + + // 3) Check that a new participant row got created for 'address3' + const { rows: participantRows } = await pgPools.stats.query(` + SELECT id, participant_address + FROM participants + WHERE 
participant_address = 'address3' + `) + assert.strictEqual(participantRows.length, 1, 'Should have created a new participant for address3') + + // 4) Check daily_reward_transfers references that new participant + const { rows: transferRows } = await pgPools.stats.query(` + SELECT to_address_id, amount, last_checked_block + FROM daily_reward_transfers + `) + assert.strictEqual(transferRows.length, 1, 'Should have inserted a new record in daily_reward_transfers') + assert.strictEqual(transferRows[0].amount, '400') + }) it('should correctly observe and update transfer events', async () => { ieContractMock.queryFilter = async (eventName, fromBlock) => { @@ -55,7 +85,10 @@ describe('observer', () => { await observeTransferEvents(pgPools.stats, ieContractMock, providerMock) const { rows } = await pgPools.stats.query(` - SELECT day::TEXT, to_address, amount, last_checked_block FROM daily_reward_transfers + SELECT day::TEXT, participant_address as to_address, amount, last_checked_block + FROM daily_reward_transfers + LEFT JOIN participants ON daily_reward_transfers.to_address_id = participants.id + ORDER BY to_address_id `) assert.strictEqual(rows.length, 1) assert.deepStrictEqual(rows, [{ @@ -75,8 +108,10 @@ describe('observer', () => { await observeTransferEvents(pgPools.stats, ieContractMock, providerMock) const { rows } = await pgPools.stats.query(` - SELECT day::TEXT, to_address, amount, last_checked_block FROM daily_reward_transfers - ORDER BY to_address + SELECT day::TEXT, participant_address as to_address, amount, last_checked_block + FROM daily_reward_transfers + LEFT JOIN participants ON daily_reward_transfers.to_address_id = participants.id + ORDER BY to_address_id `) assert.strictEqual(rows.length, 2) assert.deepStrictEqual(rows, [ @@ -101,7 +136,10 @@ describe('observer', () => { assert.strictEqual(numEvents2, 0) const { rows } = await pgPools.stats.query(` - SELECT day::TEXT, to_address, amount, last_checked_block FROM daily_reward_transfers + SELECT 
day::TEXT, participant_address as to_address, amount, last_checked_block + FROM daily_reward_transfers + LEFT JOIN participants ON daily_reward_transfers.to_address_id = participants.id + ORDER BY to_address_id `) assert.strictEqual(rows.length, 1) assert.deepStrictEqual(rows, [{ @@ -123,16 +161,19 @@ describe('observer', () => { await observeTransferEvents(pgPools.stats, ieContractMock, providerMock) const { rows } = await pgPools.stats.query(` - SELECT day::TEXT, to_address, amount, last_checked_block FROM daily_reward_transfers - ORDER BY to_address + SELECT day::TEXT, participant_address as to_address, amount, last_checked_block + FROM daily_reward_transfers + LEFT JOIN participants ON daily_reward_transfers.to_address_id = participants.id + ORDER BY to_address_id `) assert.strictEqual(rows.length, 1) - assert.deepStrictEqual(rows, [ - { day: today(), to_address: 'address1', amount: '250', last_checked_block: 2500 } - ]) + assert.deepStrictEqual(rows, [{ + day: today(), to_address: 'address1', amount: '250', last_checked_block: 2500 + }]) }) }) + // 2) Insert participant for scheduled rewards test describe('observeScheduledRewards', () => { beforeEach(async () => { await pgPools.evaluate.query('DELETE FROM recent_station_details') @@ -140,6 +181,8 @@ describe('observer', () => { await pgPools.evaluate.query('DELETE FROM daily_participants') await pgPools.evaluate.query('DELETE FROM participants') await pgPools.stats.query('DELETE FROM daily_scheduled_rewards') + + // NOTE: these participants are defined in the spark_evaluate database! 
await givenDailyParticipants(pgPools.evaluate, today(), ['0xCURRENT']) await givenDailyParticipants(pgPools.evaluate, '2000-01-01', ['0xOLD']) }) @@ -148,40 +191,69 @@ describe('observer', () => { /** @type {any} */ const ieContract = { rewardsScheduledFor: async (address) => { - if (address === '0xCURRENT') { - return 100n - } else { - throw new Error('Should never be called') - } + assert.strictEqual(address, '0xCURRENT') + return 100n } } + const fetchMock = async url => { assert.strictEqual(url, 'https://spark-rewards.fly.dev/scheduled-rewards/0xCURRENT') return new Response(JSON.stringify('10')) } + await observeScheduledRewards(pgPools, ieContract, fetchMock) + const { rows } = await pgPools.stats.query(` SELECT participant_address, scheduled_rewards FROM daily_scheduled_rewards + LEFT JOIN participants ON daily_scheduled_rewards.participant_id = participants.id `) - assert.deepStrictEqual(rows, [{ - participant_address: '0xCURRENT', - scheduled_rewards: '110' + + const formattedRows = rows.map(row => ({ + participantAddress: row.participant_address, + scheduledRewards: row.scheduled_rewards + })) + + assert.deepStrictEqual(formattedRows, [{ + participantAddress: '0xCURRENT', + scheduledRewards: '110' }]) }) + it('updates scheduled rewards', async () => { /** @type {any} */ const ieContract = { - rewardsScheduledFor: async () => 200n + rewardsScheduledFor: async (address) => { + console.log('rewardsScheduledFor(%s)', address) + if (address === '0xCURRENT') { + return 200n + } else { + throw new Error(`Unexpected address queried: ${address}`) + } + } } - await observeScheduledRewards(pgPools, ieContract) + + const fetchMock = async url => { + assert.strictEqual(url, 'https://spark-rewards.fly.dev/scheduled-rewards/0xCURRENT') + return new Response(JSON.stringify('0')) + } + + await observeScheduledRewards(pgPools, ieContract, fetchMock) + const { rows } = await pgPools.stats.query(` SELECT participant_address, scheduled_rewards FROM daily_scheduled_rewards + 
LEFT JOIN participants ON daily_scheduled_rewards.participant_id = participants.id `) - assert.deepStrictEqual(rows, [{ - participant_address: '0xCURRENT', - scheduled_rewards: '200' + + const formattedRows = rows.map(row => ({ + participantAddress: row.participant_address, + scheduledRewards: row.scheduled_rewards + })) + + assert.deepStrictEqual(formattedRows, [{ + participantAddress: '0xCURRENT', + scheduledRewards: '200' }]) }) }) diff --git a/observer/test/platform-stats.test.js b/observer/test/platform-stats.test.js index b2128d5..5451037 100644 --- a/observer/test/platform-stats.test.js +++ b/observer/test/platform-stats.test.js @@ -1,8 +1,9 @@ import assert from 'node:assert' -import { beforeEach, describe, it } from 'mocha' +import { beforeEach, describe, it, before, after, afterEach } from 'mocha' import { getStatsPgPool, migrateStatsDB } from '@filecoin-station/spark-stats-db' import { updateDailyTransferStats } from '../lib/platform-stats.js' +import { mapParticipantsToIds } from '../lib/map-participants-to-ids.js' describe('platform-stats-generator', () => { /** @type {import('pg').PoolClient} */ @@ -15,12 +16,19 @@ describe('platform-stats-generator', () => { }) let today + let addressMap beforeEach(async () => { await pgClient.query('DELETE FROM daily_reward_transfers') - + await pgClient.query('DELETE FROM participants') // Run all tests inside a transaction to ensure `now()` always returns the same value // See https://dba.stackexchange.com/a/63549/125312 // This avoids subtle race conditions when the tests are executed around midnight. 
+ + // Insert participants via mapParticipantsToIds + const addresses = new Set(['address1', 'address2']) + addressMap = await mapParticipantsToIds(pgClient, addresses) + + // Run all tests inside a transaction to ensure consistency in `now()` await pgClient.query('BEGIN TRANSACTION') today = await getCurrentDate() }) @@ -35,31 +43,58 @@ describe('platform-stats-generator', () => { describe('updateDailyTransferStats', () => { it('should correctly update daily Transfer stats with new transfer events', async () => { - await updateDailyTransferStats(pgClient, { toAddress: 'address1', amount: 100 }, 1) - await updateDailyTransferStats(pgClient, { toAddress: 'address1', amount: 200 }, 2) + await updateDailyTransferStats(pgClient, { + toAddressId: addressMap.get('address1'), + amount: 100 + }, 1) + + await updateDailyTransferStats(pgClient, { + toAddressId: addressMap.get('address1'), + amount: 200 + }, 2) const { rows } = await pgClient.query(` - SELECT day::TEXT, to_address, amount, last_checked_block FROM daily_reward_transfers - `) + SELECT day::TEXT, to_address_id, amount, last_checked_block FROM daily_reward_transfers + `) assert.strictEqual(rows.length, 1) assert.deepStrictEqual(rows, [{ - day: today, to_address: 'address1', amount: '300', last_checked_block: 2 + day: today, + to_address_id: addressMap.get('address1'), + amount: '300', + last_checked_block: 2 }]) }) it('should handle multiple addresses in daily Transfer stats', async () => { - await updateDailyTransferStats(pgClient, { toAddress: 'address1', amount: 50 }, 1) - await updateDailyTransferStats(pgClient, { toAddress: 'address2', amount: 150 }, 1) + await updateDailyTransferStats(pgClient, { + toAddressId: addressMap.get('address1'), + amount: 50 + }, 1) + + await updateDailyTransferStats(pgClient, { + toAddressId: addressMap.get('address2'), + amount: 150 + }, 1) const { rows } = await pgClient.query(` - SELECT day::TEXT, to_address, amount, last_checked_block FROM daily_reward_transfers - ORDER BY 
to_address + SELECT day::TEXT, to_address_id, amount, last_checked_block FROM daily_reward_transfers + ORDER BY to_address_id `) assert.strictEqual(rows.length, 2) assert.deepStrictEqual(rows, [ - { day: today, to_address: 'address1', amount: '50', last_checked_block: 1 }, - { day: today, to_address: 'address2', amount: '150', last_checked_block: 1 } + { + day: today, + to_address_id: addressMap.get('address1'), + amount: '50', + last_checked_block: 1 + }, + { + day: today, + to_address_id: addressMap.get('address2'), + amount: '150', + last_checked_block: 1 + } ]) }) }) diff --git a/stats/lib/platform-stats-fetchers.js b/stats/lib/platform-stats-fetchers.js index d4533f9..2919bde 100644 --- a/stats/lib/platform-stats-fetchers.js +++ b/stats/lib/platform-stats-fetchers.js @@ -75,10 +75,13 @@ export const fetchDailyRewardTransfers = async (pgPool, filter) => { 'Date range must be 31 days max' ) const { rows } = await pgPool.query(` - SELECT day::TEXT, to_address, amount - FROM daily_reward_transfers - WHERE day >= $1 AND day <= $2 + SELECT drt.day::TEXT, p.participant_address AS to_address, drt.amount + FROM daily_reward_transfers drt + JOIN participants p ON drt.to_address_id = p.id + WHERE drt.day BETWEEN $1 AND $2 + ORDER BY drt.day `, [filter.from, filter.to]) + const days = {} for (const row of rows) { if (!days[row.day]) { @@ -136,20 +139,23 @@ export const fetchTopEarningParticipants = async (pgPool, filter) => { assert(filter.to === today(), 400, 'filter.to must be today, other values are not supported') const { rows } = await pgPool.query(` WITH latest_scheduled_rewards AS ( - SELECT DISTINCT ON (participant_address) participant_address, scheduled_rewards + SELECT DISTINCT ON (participant_id) participant_id, scheduled_rewards FROM daily_scheduled_rewards - ORDER BY participant_address, day DESC + ORDER BY participant_id, day DESC ) SELECT - COALESCE(drt.to_address, lsr.participant_address) as participant_address, - COALESCE(SUM(drt.amount), 0) + 
COALESCE(lsr.scheduled_rewards, 0) as total_rewards + p.participant_address, + COALESCE(SUM(drt.amount), 0) + COALESCE(lsr.scheduled_rewards, 0) AS total_rewards FROM daily_reward_transfers drt FULL OUTER JOIN latest_scheduled_rewards lsr - ON drt.to_address = lsr.participant_address - WHERE (drt.day >= $1 AND drt.day <= $2) OR drt.day IS NULL - GROUP BY COALESCE(drt.to_address, lsr.participant_address), lsr.scheduled_rewards - ORDER BY total_rewards DESC + ON drt.to_address_id = lsr.participant_id + JOIN participants p + ON p.id = COALESCE(drt.to_address_id, lsr.participant_id) + WHERE (drt.day BETWEEN $1 AND $2) OR drt.day IS NULL + GROUP BY p.id, p.participant_address, lsr.scheduled_rewards + ORDER BY total_rewards DESC; `, [filter.from, filter.to]) + return rows } diff --git a/stats/lib/stats-fetchers.js b/stats/lib/stats-fetchers.js index 8179231..943098f 100644 --- a/stats/lib/stats-fetchers.js +++ b/stats/lib/stats-fetchers.js @@ -187,12 +187,15 @@ export const fetchParticipantChangeRates = async (pgPools, filter) => { * @param {import('./typings.js').DateRangeFilter} filter * @param {string} address */ -export const fetchParticipantScheduledRewards = async (pgPools, { from, to }, address) => { +export const fetchParticipantScheduledRewards = async (pgPools, filter, address) => { const { rows } = await pgPools.stats.query(` - SELECT day::text, scheduled_rewards - FROM daily_scheduled_rewards - WHERE participant_address = $1 AND day >= $2 AND day <= $3 - `, [address, from, to]) + SELECT dsr.day::TEXT, dsr.scheduled_rewards + FROM daily_scheduled_rewards dsr + JOIN participants p ON dsr.participant_id = p.id + WHERE p.participant_address = $1 + AND dsr.day BETWEEN $2 AND $3 + ORDER BY dsr.day + `, [address, filter.from, filter.to]) return rows } @@ -201,12 +204,15 @@ export const fetchParticipantScheduledRewards = async (pgPools, { from, to }, ad * @param {import('./typings.js').DateRangeFilter} filter * @param {string} address */ -export const 
fetchParticipantRewardTransfers = async (pgPools, { from, to }, address) => { +export const fetchParticipantRewardTransfers = async (pgPools, filter, address) => { const { rows } = await pgPools.stats.query(` - SELECT day::TEXT, amount - FROM daily_reward_transfers - WHERE to_address = $1 AND day >= $2 AND day <= $3 - `, [address, from, to]) + SELECT drt.day::TEXT, drt.amount + FROM daily_reward_transfers drt + JOIN participants p ON drt.to_address_id = p.id + WHERE p.participant_address = $1 + AND drt.day BETWEEN $2 AND $3 + ORDER BY drt.day + `, [address, filter.from, filter.to]) return rows } diff --git a/stats/test/app.test.js b/stats/test/app.test.js index 5fa9dde..65ab2eb 100644 --- a/stats/test/app.test.js +++ b/stats/test/app.test.js @@ -1,7 +1,10 @@ import assert from 'node:assert' import { getPgPools } from '@filecoin-station/spark-stats-db' -import { givenDailyParticipants } from '@filecoin-station/spark-stats-db/test-helpers.js' - +import { + givenDailyParticipants, + givenScheduledRewards, + givenRewardTransfer +} from '@filecoin-station/spark-stats-db/test-helpers.js' import { assertResponseStatus } from './test-helpers.js' import { createApp } from '../lib/app.js' import { today } from '../lib/request-helpers.js' @@ -21,9 +24,10 @@ describe('HTTP request handler', () => { SPARK_API_BASE_URL: 'https://api.filspark.com/', pgPools, logger: { - level: process.env.DEBUG === '*' || process.env.DEBUG?.includes('test') - ? 'debug' - : 'error' + level: + process.env.DEBUG === '*' || process.env.DEBUG?.includes('test') + ? 
'debug' + : 'error' } }) @@ -67,34 +71,87 @@ describe('HTTP request handler', () => { it('returns today stats for no query string', async () => { const day = today() - await givenRetrievalStats(pgPools.evaluate, { day, total: 10, successful: 1, successfulHttp: 0, successfulHttpHead: 0 }) - const res = await fetch(new URL('/retrieval-success-rate', baseUrl), { redirect: 'follow' }) + await givenRetrievalStats(pgPools.evaluate, { + day, + total: 10, + successful: 1, + successfulHttp: 0, + successfulHttpHead: 0 + }) + const res = await fetch(new URL('/retrieval-success-rate', baseUrl), { + redirect: 'follow' + }) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { day, success_rate: 0.1, successful: '1', total: '10', successful_http: '0', success_rate_http: 0, success_rate_http_head: 0 } + { + day, + success_rate: 0.1, + successful: '1', + total: '10', + successful_http: '0', + success_rate_http: 0, + success_rate_http_head: 0 + } ]) }) it('applies from & to in YYYY-MM-DD format', async () => { - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', total: 10, successful: 1, successfulHttp: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-11', total: 20, successful: 1, successfulHttp: 0 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-12', total: 30, successful: 3, successfulHttp: 3 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-13', total: 40, successful: 1, successfulHttp: 1 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + total: 20, + successful: 1, + successfulHttp: 0 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + total: 30, + successful: 3, + successfulHttp: 3 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-13', + total: 40, + successful: 1, + 
successfulHttp: 1 + }) const res = await fetch( new URL( '/retrieval-success-rate?from=2024-01-11&to=2024-01-12', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { day: '2024-01-11', success_rate: 0.05, successful: '1', total: '20', successful_http: '0', success_rate_http: 0, success_rate_http_head: null }, - { day: '2024-01-12', success_rate: 0.1, successful: '3', total: '30', successful_http: '3', success_rate_http: 0.1, success_rate_http_head: null } + { + day: '2024-01-11', + success_rate: 0.05, + successful: '1', + total: '20', + successful_http: '0', + success_rate_http: 0, + success_rate_http_head: null + }, + { + day: '2024-01-12', + success_rate: 0.1, + successful: '3', + total: '30', + successful_http: '3', + success_rate_http: 0.1, + success_rate_http_head: null + } ]) }) @@ -103,7 +160,8 @@ describe('HTTP request handler', () => { new URL( '/retrieval-success-rate?from=2024-01-10T13:44:44.289Z&to=2024-01-15T09:44:44.289Z', baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -116,140 +174,324 @@ describe('HTTP request handler', () => { it('caches data including today for short time', async () => { const res = await fetch( - new URL(`/retrieval-success-rate?from=2024-01-01&to=${today()}`, baseUrl), + new URL( + `/retrieval-success-rate?from=2024-01-01&to=${today()}`, + baseUrl + ), { redirect: 'manual' } ) await assertResponseStatus(res, 200) - assert.strictEqual(res.headers.get('cache-control'), 'public, max-age=600') + assert.strictEqual( + res.headers.get('cache-control'), + 'public, max-age=600' + ) }) it('caches historical including for long time & marks them immutable', async () => { const res = await fetch( - new URL('/retrieval-success-rate?from=2023-01-01&to=2023-12-31', baseUrl), + new URL( + '/retrieval-success-rate?from=2023-01-01&to=2023-12-31', + baseUrl + ), { redirect: 'manual' } ) await assertResponseStatus(res, 200) - 
assert.strictEqual(res.headers.get('cache-control'), 'public, max-age=31536000, immutable') + assert.strictEqual( + res.headers.get('cache-control'), + 'public, max-age=31536000, immutable' + ) }) it('sums daily retrievals from all miners', async () => { - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', total: 10, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', total: 100, successful: 50, successfulHttp: 35, successfulHttpHead: 35 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-11', minerId: 'f1one', total: 20, successful: 1, successfulHttp: 0, successfulHttpHead: 0 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-11', minerId: 'f1two', total: 200, successful: 60, successfulHttp: 50, successfulHttpHead: 50 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1two', + total: 100, + successful: 50, + successfulHttp: 35, + successfulHttpHead: 35 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + minerId: 'f1one', + total: 20, + successful: 1, + successfulHttp: 0, + successfulHttpHead: 0 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + minerId: 'f1two', + total: 200, + successful: 60, + successfulHttp: 50, + successfulHttpHead: 50 + }) const res = await fetch( new URL( '/retrieval-success-rate?from=2024-01-10&to=2024-01-11', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ - { day: '2024-01-10', success_rate: 51 / 110, total: '110', 
successful: '51', successful_http: '36', success_rate_http: 36 / 110, success_rate_http_head: 36 / 110 }, - { day: '2024-01-11', success_rate: 61 / 220, total: '220', successful: '61', successful_http: '50', success_rate_http: 50 / 220, success_rate_http_head: 50 / 220 } + { + day: '2024-01-10', + success_rate: 51 / 110, + total: '110', + successful: '51', + successful_http: '36', + success_rate_http: 36 / 110, + success_rate_http_head: 36 / 110 + }, + { + day: '2024-01-11', + success_rate: 61 / 220, + total: '220', + successful: '61', + successful_http: '50', + success_rate_http: 50 / 220, + success_rate_http_head: 50 / 220 + } ]) }) it('sorts items by date ascending', async () => { - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', total: 10, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', total: 10, successful: 5, successfulHttp: 3, successfulHttpHead: 3 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + total: 10, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + total: 10, + successful: 5, + successfulHttp: 3, + successfulHttpHead: 3 + }) const res = await fetch( new URL( '/retrieval-success-rate?from=2024-01-01&to=2024-01-31', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = (/** @type {{ day: string, success_rate: number }[]} */ + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() - ) + ) assert.deepStrictEqual(stats, [ - { day: '2024-01-10', success_rate: 5 / 
10, + total: '10', + successful: '5', + successful_http: '3', + success_rate_http: 3 / 10, + success_rate_http_head: 3 / 10 + }, + { + day: '2024-01-20', + success_rate: 1 / 10, + total: '10', + successful: '1', + successful_http: '1', + success_rate_http: 1 / 10, + success_rate_http_head: 1 / 10 + } ]) }) it('filters out miners with zero RSR when asked', async () => { - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', total: 10, successful: 1, minerId: 'f1one', successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', total: 10, successful: 0, minerId: 'f1two', successfulHttp: 0, successfulHttpHead: 0 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + total: 10, + successful: 1, + minerId: 'f1one', + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + total: 10, + successful: 0, + minerId: 'f1two', + successfulHttp: 0, + successfulHttpHead: 0 + }) const res = await fetch( new URL( '/retrieval-success-rate?from=2024-01-01&to=2024-01-31&nonZero=true', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ - { day: '2024-01-20', success_rate: 1 / 10, successful: '1', total: '10', successful_http: '1', success_rate_http: 1 / 10, success_rate_http_head: 1 / 10 } + { + day: '2024-01-20', + success_rate: 1 / 10, + successful: '1', + total: '10', + successful_http: '1', + success_rate_http: 1 / 10, + success_rate_http_head: 1 / 10 + } ]) }) it('preserves additional query string arguments when redirecting', async () => { const day = today() - await givenRetrievalStats(pgPools.evaluate, { day, total: 10, successful: 1, minerId: 'f1one', successfulHttp: 1, successfulHttpHead: 1 }) - await 
givenRetrievalStats(pgPools.evaluate, { day, total: 10, successful: 0, minerId: 'f1two', successfulHttp: 0, successfulHttpHead: 0 }) - const res = await fetch(new URL('/retrieval-success-rate?nonZero=true', baseUrl), { redirect: 'follow' }) + await givenRetrievalStats(pgPools.evaluate, { + day, + total: 10, + successful: 1, + minerId: 'f1one', + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day, + total: 10, + successful: 0, + minerId: 'f1two', + successfulHttp: 0, + successfulHttpHead: 0 + }) + const res = await fetch( + new URL('/retrieval-success-rate?nonZero=true', baseUrl), + { redirect: 'follow' } + ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { day, success_rate: 0.1, successful: '1', total: '10', successful_http: '1', success_rate_http: 0.1, success_rate_http_head: 0.1 } + { + day, + success_rate: 0.1, + successful: '1', + total: '10', + successful_http: '1', + success_rate_http: 0.1, + success_rate_http_head: 0.1 + } ]) }) it('handles successful_http values 0, null, undefined', async () => { - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', total: 10, successful: 1, successfulHttp: 0 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-21', total: 10, successful: 1, successfulHttp: undefined }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-22', total: 10, successful: 1, successfulHttp: null }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + total: 10, + successful: 1, + successfulHttp: 0 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + total: 10, + successful: 1, + successfulHttp: undefined + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-22', + total: 10, + successful: 1, + successfulHttp: null + }) const res = await fetch( new URL( '/retrieval-success-rate?from=2024-01-20&to=2024-01-22', baseUrl - ), { + ), + { redirect: 'manual' } 
) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { day: '2024-01-20', success_rate: 0.1, successful: '1', total: '10', successful_http: '0', success_rate_http: 0, success_rate_http_head: null }, - { day: '2024-01-21', success_rate: 0.1, successful: '1', total: '10', successful_http: null, success_rate_http: null, success_rate_http_head: null }, - { day: '2024-01-22', success_rate: 0.1, successful: '1', total: '10', successful_http: null, success_rate_http: null, success_rate_http_head: null } + { + day: '2024-01-20', + success_rate: 0.1, + successful: '1', + total: '10', + successful_http: '0', + success_rate_http: 0, + success_rate_http_head: null + }, + { + day: '2024-01-21', + success_rate: 0.1, + successful: '1', + total: '10', + successful_http: null, + success_rate_http: null, + success_rate_http_head: null + }, + { + day: '2024-01-22', + success_rate: 0.1, + successful: '1', + total: '10', + successful_http: null, + success_rate_http: null, + success_rate_http_head: null + } ]) }) }) describe('GET /participants/daily', () => { it('returns daily active participants for the given date range', async () => { - await givenDailyParticipants(pgPools.evaluate, '2024-01-10', ['0x10', '0x20']) - await givenDailyParticipants(pgPools.evaluate, '2024-01-11', ['0x10', '0x20', '0x30']) - await givenDailyParticipants(pgPools.evaluate, '2024-01-12', ['0x10', '0x20', '0x40', '0x50']) + await givenDailyParticipants(pgPools.evaluate, '2024-01-10', [ + '0x10', + '0x20' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-01-11', [ + '0x10', + '0x20', + '0x30' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-01-12', [ + '0x10', + '0x20', + '0x40', + '0x50' + ]) await givenDailyParticipants(pgPools.evaluate, '2024-01-13', ['0x10']) const res = await fetch( - new URL( - '/participants/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/participants/daily?from=2024-01-11&to=2024-01-12', baseUrl), 
+ { redirect: 'manual' } ) @@ -265,20 +507,36 @@ describe('HTTP request handler', () => { describe('GET /participants/monthly', () => { it('returns monthly active participants for the given date range ignoring the day number', async () => { // before the range - await givenDailyParticipants(pgPools.evaluate, '2023-12-31', ['0x01', '0x02']) + await givenDailyParticipants(pgPools.evaluate, '2023-12-31', [ + '0x01', + '0x02' + ]) // in the range - await givenDailyParticipants(pgPools.evaluate, '2024-01-10', ['0x10', '0x20']) - await givenDailyParticipants(pgPools.evaluate, '2024-01-11', ['0x10', '0x20', '0x30']) - await givenDailyParticipants(pgPools.evaluate, '2024-01-12', ['0x10', '0x20', '0x40', '0x50']) - await givenDailyParticipants(pgPools.evaluate, '2024-02-13', ['0x10', '0x60']) + await givenDailyParticipants(pgPools.evaluate, '2024-01-10', [ + '0x10', + '0x20' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-01-11', [ + '0x10', + '0x20', + '0x30' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-01-12', [ + '0x10', + '0x20', + '0x40', + '0x50' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-02-13', [ + '0x10', + '0x60' + ]) // after the range await givenDailyParticipants(pgPools.evaluate, '2024-03-01', ['0x99']) const res = await fetch( - new URL( - '/participants/monthly?from=2024-01-12&to=2024-02-12', - baseUrl - ), { + new URL('/participants/monthly?from=2024-01-12&to=2024-02-12', baseUrl), + { redirect: 'manual' } ) @@ -294,17 +552,45 @@ describe('HTTP request handler', () => { describe('GET /participants/change-rates', () => { it('returns monthly change rates for the given date range ignoring the day number', async () => { // before the range - await givenDailyParticipants(pgPools.evaluate, '2023-12-31', ['0x01', '0x02']) + await givenDailyParticipants(pgPools.evaluate, '2023-12-31', [ + '0x01', + '0x02' + ]) // the last month before the range - await givenDailyParticipants(pgPools.evaluate, '2024-01-10', ['0x10', 
'0x20']) - await givenDailyParticipants(pgPools.evaluate, '2024-01-11', ['0x10', '0x20', '0x30']) - await givenDailyParticipants(pgPools.evaluate, '2024-01-12', ['0x10', '0x20', '0x40', '0x50']) + await givenDailyParticipants(pgPools.evaluate, '2024-01-10', [ + '0x10', + '0x20' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-01-11', [ + '0x10', + '0x20', + '0x30' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-01-12', [ + '0x10', + '0x20', + '0x40', + '0x50' + ]) // the first month in the range - 0x50 is gone - await givenDailyParticipants(pgPools.evaluate, '2024-02-11', ['0x10', '0x20']) - await givenDailyParticipants(pgPools.evaluate, '2024-02-13', ['0x20', '0x30', '0x40']) + await givenDailyParticipants(pgPools.evaluate, '2024-02-11', [ + '0x10', + '0x20' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-02-13', [ + '0x20', + '0x30', + '0x40' + ]) // the second month in the range - 0x30 and 0x40 is gone, new participant 0x60 - await givenDailyParticipants(pgPools.evaluate, '2024-03-11', ['0x10', '0x20']) - await givenDailyParticipants(pgPools.evaluate, '2024-03-13', ['0x10', '0x60']) + await givenDailyParticipants(pgPools.evaluate, '2024-03-11', [ + '0x10', + '0x20' + ]) + await givenDailyParticipants(pgPools.evaluate, '2024-03-13', [ + '0x10', + '0x60' + ]) // after the range await givenDailyParticipants(pgPools.evaluate, '2024-04-01', ['0x99']) @@ -312,7 +598,8 @@ describe('HTTP request handler', () => { new URL( '/participants/change-rates?from=2024-02-28&to=2024-03-01', baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -346,7 +633,10 @@ describe('HTTP request handler', () => { it('handles a single-month range', async () => { // the last month before the range - await givenDailyParticipants(pgPools.evaluate, '2024-01-10', ['0x10', '0x20']) + await givenDailyParticipants(pgPools.evaluate, '2024-01-10', [ + '0x10', + '0x20' + ]) // the only month in the range - 0x20 is gone await givenDailyParticipants(pgPools.evaluate, 
'2024-02-11', ['0x10']) // after the range @@ -356,33 +646,43 @@ describe('HTTP request handler', () => { new URL( '/participants/change-rates?from=2024-02-11&to=2024-02-11', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() - assert.deepStrictEqual(stats, [{ - month: '2024-02-01', - churnRate: 0.5, - growthRate: 0, - retentionRate: 0.5 - }]) + assert.deepStrictEqual(stats, [ + { + month: '2024-02-01', + churnRate: 0.5, + growthRate: 0, + retentionRate: 0.5 + } + ]) }) }) + beforeEach(async () => { + await pgPools.stats.query('DELETE FROM daily_scheduled_rewards') + await pgPools.stats.query('DELETE FROM daily_reward_transfers') + await pgPools.stats.query('DELETE FROM participants') + }) + describe('GET /participant/:address/scheduled-rewards', () => { it('returns daily scheduled rewards for the given date range', async () => { - await pgPools.stats.query( - 'INSERT INTO daily_scheduled_rewards (day, participant_address, scheduled_rewards) VALUES ($1, $2, $3)', - ['2024-01-11', '0x20', '1'] + await givenScheduledRewards( + pgPools.stats, + '2024-01-11', + new Map([['0x20', '1']]) ) - const res = await fetch( new URL( '/participant/0x20/scheduled-rewards?from=2024-01-11&to=2024-01-12', baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -396,68 +696,152 @@ describe('HTTP request handler', () => { describe('GET /participant/:address/reward-transfers', () => { it('returns daily reward transfers for the given date range', async () => { - await pgPools.stats.query(` - INSERT INTO daily_reward_transfers - (day, to_address, amount, last_checked_block) - VALUES - ($1, $2, $3, $4) - `, ['2024-01-11', '0x00', '1', 0]) + await givenRewardTransfer(pgPools.stats, '2024-01-11', '0x00', '1', 0) const res = await fetch( new URL( - '/participant/0x00/reward-transfers?from=2024-01-11&to=2024-01-12', + '/participant/0x00/reward-transfers?from=2024-01-11&to=2024-03-12', baseUrl - ), { + ), + { redirect: 'manual' } ) await 
assertResponseStatus(res, 200) const stats = await res.json() - assert.deepStrictEqual(stats, [ - { day: '2024-01-11', amount: '1' } - ]) + assert.deepStrictEqual(stats, [{ day: '2024-01-11', amount: '1' }]) }) }) describe('GET /miners/retrieval-success-rate/summary', () => { it('returns a summary of miners RSR for the given date range', async () => { // before the range - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', total: 10, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', total: 100, successful: 20, successfulHttp: 10, successfulHttpHead: 10 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1two', + total: 100, + successful: 20, + successfulHttp: 10, + successfulHttpHead: 10 + }) // in the range - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-11', minerId: 'f1one', total: 20, successful: 1, successfulHttp: 0, successfulHttpHead: 0 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-11', minerId: 'f1two', total: 200, successful: 150, successfulHttp: 100, successfulHttpHead: 100 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + minerId: 'f1one', + total: 20, + successful: 1, + successfulHttp: 0, + successfulHttpHead: 0 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + minerId: 'f1two', + total: 200, + successful: 150, + successfulHttp: 100, + successfulHttpHead: 100 + }) // after the range - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-12', minerId: 'f1one', total: 30, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-12', minerId: 'f1two', total: 300, successful: 60, 
successfulHttp: 60, successfulHttpHead: 60 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + minerId: 'f1one', + total: 30, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + minerId: 'f1two', + total: 300, + successful: 60, + successfulHttp: 60, + successfulHttpHead: 60 + }) const res = await fetch( new URL( '/miners/retrieval-success-rate/summary?from=2024-01-11&to=2024-01-11', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { miner_id: 'f1one', success_rate: 0.05, total: '20', successful: '1', successful_http: '0', success_rate_http: 0, success_rate_http_head: 0 }, - { miner_id: 'f1two', success_rate: 0.75, total: '200', successful: '150', successful_http: '100', success_rate_http: 100 / 200, success_rate_http_head: 100 / 200 } + { + miner_id: 'f1one', + success_rate: 0.05, + total: '20', + successful: '1', + successful_http: '0', + success_rate_http: 0, + success_rate_http_head: 0 + }, + { + miner_id: 'f1two', + success_rate: 0.75, + total: '200', + successful: '150', + successful_http: '100', + success_rate_http: 100 / 200, + success_rate_http_head: 100 / 200 + } ]) }) it('handles successful_http values 0, null, undefined', async () => { - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1one', total: 10, successful: 1, successfulHttp: 0 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1one', total: 10, successful: 1, successfulHttp: undefined }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-22', minerId: 'f1one', total: 10, successful: 1, successfulHttp: null }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-23', minerId: 'f2two', total: 10, successful: 1, successfulHttp: undefined }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-24', minerId: 
'f3three', total: 20, successful: 2, successfulHttp: null }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: 0 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: undefined + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-22', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: null + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-23', + minerId: 'f2two', + total: 10, + successful: 1, + successfulHttp: undefined + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-24', + minerId: 'f3three', + total: 20, + successful: 2, + successfulHttp: null + }) let res = await fetch( new URL( '/miners/retrieval-success-rate/summary?from=2024-01-20&to=2024-01-22', baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -465,24 +849,48 @@ describe('HTTP request handler', () => { let stats = await res.json() assert.deepStrictEqual(stats, [ // If there is a single number we expect any undefined or null values to be converted to 0 by Postgres - { miner_id: 'f1one', total: '30', successful: '3', success_rate: 0.1, successful_http: '0', success_rate_http: 0, success_rate_http_head: null } + { + miner_id: 'f1one', + total: '30', + successful: '3', + success_rate: 0.1, + successful_http: '0', + success_rate_http: 0, + success_rate_http_head: null + } ]) res = await fetch( new URL( '/miners/retrieval-success-rate/summary?from=2024-01-23&to=2024-01-24', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) stats = await res.json() assert.deepStrictEqual(stats, [ - { miner_id: 'f2two', total: '10', successful: '1', success_rate: 0.1, successful_http: null, success_rate_http: null, success_rate_http_head: null }, - { miner_id: 'f3three', total: '20', successful: '2', success_rate: 0.1, successful_http: null, 
success_rate_http: null, success_rate_http_head: null } - ] - ) + { + miner_id: 'f2two', + total: '10', + successful: '1', + success_rate: 0.1, + successful_http: null, + success_rate_http: null, + success_rate_http_head: null + }, + { + miner_id: 'f3three', + total: '20', + successful: '2', + success_rate: 0.1, + successful_http: null, + success_rate_http: null, + success_rate_http_head: null + } + ]) }) }) @@ -503,7 +911,8 @@ describe('HTTP request handler', () => { new URL( '/retrieval-result-codes/daily?from=2024-01-11&to=2024-01-13', baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -520,35 +929,58 @@ describe('HTTP request handler', () => { describe('summary of eligible deals', () => { describe('GET /miner/{id}/deals/eligible/summary', () => { it('redirects to spark-api', async () => { - const res = await fetch(new URL('/miner/f0230/deals/eligible/summary', baseUrl), { redirect: 'manual' }) + const res = await fetch( + new URL('/miner/f0230/deals/eligible/summary', baseUrl), + { redirect: 'manual' } + ) await assertResponseStatus(res, 302) assert.strictEqual(res.headers.get('cache-control'), 'max-age=21600') - assert.strictEqual(res.headers.get('location'), 'https://api.filspark.com/miner/f0230/deals/eligible/summary') + assert.strictEqual( + res.headers.get('location'), + 'https://api.filspark.com/miner/f0230/deals/eligible/summary' + ) }) }) describe('GET /client/{id}/deals/eligible/summary', () => { it('redirects to spark-api', async () => { - const res = await fetch(new URL('/client/f0800/deals/eligible/summary', baseUrl), { redirect: 'manual' }) + const res = await fetch( + new URL('/client/f0800/deals/eligible/summary', baseUrl), + { redirect: 'manual' } + ) await assertResponseStatus(res, 302) assert.strictEqual(res.headers.get('cache-control'), 'max-age=21600') - assert.strictEqual(res.headers.get('location'), 'https://api.filspark.com/client/f0800/deals/eligible/summary') + assert.strictEqual( + res.headers.get('location'), + 
'https://api.filspark.com/client/f0800/deals/eligible/summary' + ) }) }) describe('GET /allocator/{id}/deals/eligible/summary', () => { it('redirects to spark-api', async () => { - const res = await fetch(new URL('/allocator/f0500/deals/eligible/summary', baseUrl), { redirect: 'manual' }) + const res = await fetch( + new URL('/allocator/f0500/deals/eligible/summary', baseUrl), + { redirect: 'manual' } + ) await assertResponseStatus(res, 302) assert.strictEqual(res.headers.get('cache-control'), 'max-age=21600') - assert.strictEqual(res.headers.get('location'), 'https://api.filspark.com/allocator/f0500/deals/eligible/summary') + assert.strictEqual( + res.headers.get('location'), + 'https://api.filspark.com/allocator/f0500/deals/eligible/summary' + ) }) }) }) describe('GET /deals/daily', () => { it('returns daily deal stats for the given date range', async () => { - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-10', tested: 10, indexed: 5, retrievable: 1 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-10', + tested: 10, + indexed: 5, + retrievable: 1 + }) await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-11', tested: 20, @@ -558,14 +990,22 @@ describe('HTTP request handler', () => { retrievalMajorityFound: 5, retrievable: 2 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-12', tested: 30, indexed: 7, retrievable: 3 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-13', tested: 40, indexed: 8, retrievable: 4 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-12', + tested: 30, + indexed: 7, + retrievable: 3 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-13', + tested: 40, + indexed: 8, + retrievable: 4 + }) const res = await fetch( - new URL( - '/deals/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/deals/daily?from=2024-01-11&to=2024-01-12', baseUrl), + { redirect: 'manual' } ) @@ -594,80 +1034,142 @@ describe('HTTP request handler', () => 
{ }) it('aggregates stats over miners', async () => { - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-11', minerId: 'f1aa', tested: 10 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-11', minerId: 'f1bb', tested: 20 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-12', minerId: 'f1aa', tested: 30 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-12', minerId: 'f1bb', tested: 40 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-11', + minerId: 'f1aa', + tested: 10 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-11', + minerId: 'f1bb', + tested: 20 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-12', + minerId: 'f1aa', + tested: 30 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-12', + minerId: 'f1bb', + tested: 40 + }) const res = await fetch( - new URL( - '/deals/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/deals/daily?from=2024-01-11&to=2024-01-12', baseUrl), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {any[]} */(await res.json()) - assert.deepStrictEqual(stats.map(({ day, tested }) => ({ day, tested })), [ - { - day: '2024-01-11', - tested: String(10 + 20) - }, - { - day: '2024-01-12', - tested: String(30 + 40) - } - ]) + const stats = /** @type {any[]} */ (await res.json()) + assert.deepStrictEqual( + stats.map(({ day, tested }) => ({ day, tested })), + [ + { + day: '2024-01-11', + tested: String(10 + 20) + }, + { + day: '2024-01-12', + tested: String(30 + 40) + } + ] + ) }) it('aggregates stats over clients', async () => { - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-11', clientId: 'f1aa', tested: 10 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-11', clientId: 'f1bb', tested: 20 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-01-12', clientId: 'f1aa', tested: 30 }) - await 
givenDailyDealStats(pgPools.evaluate, { day: '2024-01-12', minerId: 'f1bb', tested: 40 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-11', + clientId: 'f1aa', + tested: 10 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-11', + clientId: 'f1bb', + tested: 20 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-12', + clientId: 'f1aa', + tested: 30 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-01-12', + minerId: 'f1bb', + tested: 40 + }) const res = await fetch( - new URL( - '/deals/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/deals/daily?from=2024-01-11&to=2024-01-12', baseUrl), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {any[]} */(await res.json()) - assert.deepStrictEqual(stats.map(({ day, tested }) => ({ day, tested })), [ - { - day: '2024-01-11', - tested: String(10 + 20) - }, - { - day: '2024-01-12', - tested: String(30 + 40) - } - ]) + const stats = /** @type {any[]} */ (await res.json()) + assert.deepStrictEqual( + stats.map(({ day, tested }) => ({ day, tested })), + [ + { + day: '2024-01-11', + tested: String(10 + 20) + }, + { + day: '2024-01-12', + tested: String(30 + 40) + } + ] + ) }) }) describe('GET /deals/summary', () => { it('returns deal summary for the given date range (including the end day)', async () => { - await givenDailyDealStats(pgPools.evaluate, { day: '2024-03-12', tested: 200, indexed: 52, retrievable: 2 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-03-12', + tested: 200, + indexed: 52, + retrievable: 2 + }) // filter.to - 7 days -> should be excluded - await givenDailyDealStats(pgPools.evaluate, { day: '2024-03-23', tested: 300, indexed: 53, retrievable: 3 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-03-23', + tested: 300, + indexed: 53, + retrievable: 3 + }) // last 7 days - await givenDailyDealStats(pgPools.evaluate, { day: '2024-03-24', tested: 400, 
indexed: 54, retrievable: 4 }) - await givenDailyDealStats(pgPools.evaluate, { day: '2024-03-29', tested: 500, indexed: 55, retrievable: 5 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-03-24', + tested: 400, + indexed: 54, + retrievable: 4 + }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-03-29', + tested: 500, + indexed: 55, + retrievable: 5 + }) // `filter.to` (e.g. today) - should be included - await givenDailyDealStats(pgPools.evaluate, { day: '2024-03-30', tested: 6000, indexed: 600, retrievable: 60 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-03-30', + tested: 6000, + indexed: 600, + retrievable: 60 + }) // after the requested range - await givenDailyDealStats(pgPools.evaluate, { day: '2024-03-31', tested: 70000, indexed: 7000, retrievable: 700 }) + await givenDailyDealStats(pgPools.evaluate, { + day: '2024-03-31', + tested: 70000, + indexed: 7000, + retrievable: 700 + }) const res = await fetch( - new URL( - '/deals/summary?from=2024-03-24&to=2024-03-30', - baseUrl - ), { + new URL('/deals/summary?from=2024-03-24&to=2024-03-30', baseUrl), + { redirect: 'manual' } ) @@ -686,10 +1188,8 @@ describe('HTTP request handler', () => { it('handles query for future date with no recorded stats', async () => { const res = await fetch( - new URL( - '/deals/summary?from=3024-04-24&to=3024-03-30', - baseUrl - ), { + new URL('/deals/summary?from=3024-04-24&to=3024-03-30', baseUrl), + { redirect: 'manual' } ) @@ -714,7 +1214,10 @@ describe('HTTP request handler', () => { origin: 'app://-' } }) - assert.strictEqual(res.headers.get('access-control-allow-origin'), 'app://-') + assert.strictEqual( + res.headers.get('access-control-allow-origin'), + 'app://-' + ) }) it('sets CORS headers for requests from Station Desktop in development', async () => { const res = await fetch(new URL('/', baseUrl), { @@ -722,40 +1225,116 @@ describe('HTTP request handler', () => { origin: 'http://localhost:3000' } }) - 
assert.strictEqual(res.headers.get('access-control-allow-origin'), 'http://localhost:3000') + assert.strictEqual( + res.headers.get('access-control-allow-origin'), + 'http://localhost:3000' + ) }) }) describe('GET /miner/{id}/retrieval-success-rate/summary', () => { it('lists daily retrieval stats summary for specified miner in given date range', async () => { // before the range - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1one', total: 10, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1two', total: 100, successful: 20, successfulHttp: 10, successfulHttpHead: 10 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-09', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-09', + minerId: 'f1two', + total: 100, + successful: 20, + successfulHttp: 10, + successfulHttpHead: 10 + }) // in the range - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1one', total: 20, successful: 1, successfulHttp: 0, successfulHttpHead: 0 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1two', total: 200, successful: 60, successfulHttp: 50, successfulHttpHead: 50 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', total: 10, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', total: 100, successful: 50, successfulHttp: 35, successfulHttpHead: 35 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + minerId: 'f1one', + total: 20, + successful: 1, + successfulHttp: 0, + successfulHttpHead: 0 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + minerId: 'f1two', + total: 200, + successful: 60, + successfulHttp: 50, + 
successfulHttpHead: 50 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1one', + total: 10, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1two', + total: 100, + successful: 50, + successfulHttp: 35, + successfulHttpHead: 35 + }) // after the range - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1one', total: 30, successful: 1, successfulHttp: 1, successfulHttpHead: 1 }) - await givenRetrievalStats(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1two', total: 300, successful: 60, successfulHttp: 60, successfulHttpHead: 60 }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + minerId: 'f1one', + total: 30, + successful: 1, + successfulHttp: 1, + successfulHttpHead: 1 + }) + await givenRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + minerId: 'f1two', + total: 300, + successful: 60, + successfulHttp: 60, + successfulHttpHead: 60 + }) const res = await fetch( new URL( '/miner/f1one/retrieval-success-rate/summary?from=2024-01-10&to=2024-01-20', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ - { day: '2024-01-10', success_rate: 1 / 10, total: '10', successful: '1', successful_http: '1', success_rate_http: 1 / 10, success_rate_http_head: 1 / 10 }, - { day: '2024-01-20', success_rate: 1 / 20, total: '20', successful: '1', successful_http: '0', success_rate_http: 0, success_rate_http_head: 0 } + { + day: '2024-01-10', + success_rate: 1 / 10, + total: '10', + successful: '1', + successful_http: '1', + success_rate_http: 1 / 10, + success_rate_http_head: 1 / 10 + }, + { + day: '2024-01-20', + success_rate: 1 / 20, + total: '20', + successful: '1', + 
successful_http: '0', + success_rate_http: 0, + success_rate_http_head: 0 + } ]) }) }) @@ -763,17 +1342,49 @@ describe('HTTP request handler', () => { describe('miner retrieval timing stats', () => { beforeEach(async () => { // before the range - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1one', timeToFirstByteP50: [1000] }) - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1two', timeToFirstByteP50: [1000] }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-09', + minerId: 'f1one', + timeToFirstByteP50: [1000] + }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-09', + minerId: 'f1two', + timeToFirstByteP50: [1000] + }) // in the range - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1one', timeToFirstByteP50: [1000] }) - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1two', timeToFirstByteP50: [1000] }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-20', + minerId: 'f1one', + timeToFirstByteP50: [1000] + }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-20', + minerId: 'f1two', + timeToFirstByteP50: [1000] + }) - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', timeToFirstByteP50: [123, 345] }) - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', timeToFirstByteP50: [654, 789] }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1one', + timeToFirstByteP50: [123, 345] + }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-10', + minerId: 'f1two', + timeToFirstByteP50: [654, 789] + }) // after the range - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1one', timeToFirstByteP50: [1000] }) - await givenRetrievalTimings(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1two', timeToFirstByteP50: [1000] }) + await 
givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-21', + minerId: 'f1one', + timeToFirstByteP50: [1000] + }) + await givenRetrievalTimings(pgPools.evaluate, { + day: '2024-01-21', + minerId: 'f1two', + timeToFirstByteP50: [1000] + }) }) it('lists daily retrieval timings in given date range', async () => { @@ -781,13 +1392,14 @@ describe('HTTP request handler', () => { new URL( '/retrieval-timings/daily?from=2024-01-10&to=2024-01-20', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ @@ -801,13 +1413,14 @@ describe('HTTP request handler', () => { new URL( '/miner/f1one/retrieval-timings/summary?from=2024-01-10&to=2024-01-20', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ @@ -821,13 +1434,14 @@ describe('HTTP request handler', () => { new URL( '/miners/retrieval-timings/summary?from=2024-01-10&to=2024-01-20', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ @@ -837,173 +1451,463 @@ describe('HTTP request handler', () => { }) describe('GET /clients/retrieval-success-rate/summary', () => { beforeEach(async () => { - await pgPools.evaluate.query('DELETE FROM daily_client_retrieval_stats') + await pgPools.evaluate.query( + 'DELETE FROM daily_client_retrieval_stats' + ) }) it('returns a summary of clients RSR for the given date range', async () => { // before the range - await 
givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-10', clientId: 'f1oneClient', total: 10, successful: 1, successfulHttp: 1 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-10', clientId: 'f1twoClient', total: 100, successful: 20, successfulHttp: 10 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + clientId: 'f1oneClient', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + clientId: 'f1twoClient', + total: 100, + successful: 20, + successfulHttp: 10 + }) // in the range - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-11', clientId: 'f1oneClient', total: 20, successful: 1, successfulHttp: 0 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-11', clientId: 'f1twoClient', total: 200, successful: 150, successfulHttp: 100 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + clientId: 'f1oneClient', + total: 20, + successful: 1, + successfulHttp: 0 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + clientId: 'f1twoClient', + total: 200, + successful: 150, + successfulHttp: 100 + }) // after the range - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-12', clientId: 'f1oneClient', total: 30, successful: 1, successfulHttp: 1 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-12', clientId: 'f1twoClient', total: 300, successful: 60, successfulHttp: 60 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + clientId: 'f1oneClient', + total: 30, + successful: 1, + successfulHttp: 1 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + clientId: 'f1twoClient', + total: 300, + successful: 60, + successfulHttp: 60 + }) const res = await fetch( new URL( '/clients/retrieval-success-rate/summary?from=2024-01-11&to=2024-01-11', baseUrl - ), { + ), + { 
redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { client_id: 'f1oneClient', success_rate: 0.05, total: '20', successful: '1', successful_http: '0', success_rate_http: 0 }, - { client_id: 'f1twoClient', success_rate: 0.75, total: '200', successful: '150', successful_http: '100', success_rate_http: 100 / 200 } + { + client_id: 'f1oneClient', + success_rate: 0.05, + total: '20', + successful: '1', + successful_http: '0', + success_rate_http: 0 + }, + { + client_id: 'f1twoClient', + success_rate: 0.75, + total: '200', + successful: '150', + successful_http: '100', + success_rate_http: 100 / 200 + } ]) }) it('handles total value being smaller or equal to 0', async () => { - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-11', clientId: 'f1oneClient', total: 0, successful: 0, successfulHttp: 0 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-11', clientId: 'f2twoClient', total: -1, successful: 0, successfulHttp: 0 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + clientId: 'f1oneClient', + total: 0, + successful: 0, + successfulHttp: 0 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + clientId: 'f2twoClient', + total: -1, + successful: 0, + successfulHttp: 0 + }) const res = await fetch( new URL( 'clients/retrieval-success-rate/summary?from=2024-01-11&to=2024-01-11', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { client_id: 'f1oneClient', success_rate: null, total: '0', successful: '0', successful_http: '0', success_rate_http: null }, - { client_id: 'f2twoClient', success_rate: null, total: '-1', successful: '0', successful_http: '0', success_rate_http: null } + { + client_id: 'f1oneClient', + success_rate: null, + total: '0', + successful: '0', + successful_http: '0', + success_rate_http: 
null + }, + { + client_id: 'f2twoClient', + success_rate: null, + total: '-1', + successful: '0', + successful_http: '0', + success_rate_http: null + } ]) }) }) describe('GET /client/{id}/retrieval-success-rate/summary', () => { beforeEach(async () => { - await pgPools.evaluate.query('DELETE FROM daily_client_retrieval_stats') + await pgPools.evaluate.query( + 'DELETE FROM daily_client_retrieval_stats' + ) }) it('lists daily retrieval stats summary for specified client in given date range', async () => { // before the range - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-09', clientId: 'f1oneClient', total: 10, successful: 1, successfulHttp: 1 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-09', clientId: 'f1twoClient', total: 100, successful: 20, successfulHttp: 10 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-09', + clientId: 'f1oneClient', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-09', + clientId: 'f1twoClient', + total: 100, + successful: 20, + successfulHttp: 10 + }) // in the range - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-20', clientId: 'f1oneClient', total: 20, successful: 1, successfulHttp: 0 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-20', clientId: 'f1twoClient', total: 200, successful: 60, successfulHttp: 50 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-10', clientId: 'f1oneClient', total: 10, successful: 1, successfulHttp: 1 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-10', clientId: 'f1twoClient', total: 100, successful: 50, successfulHttp: 35 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + clientId: 'f1oneClient', + total: 20, + successful: 1, + successfulHttp: 0 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + clientId: 
'f1twoClient', + total: 200, + successful: 60, + successfulHttp: 50 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + clientId: 'f1oneClient', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + clientId: 'f1twoClient', + total: 100, + successful: 50, + successfulHttp: 35 + }) // after the range - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-21', clientId: 'f1oneClient', total: 30, successful: 1, successfulHttp: 1 }) - await givenClientRetrievalStats(pgPools.evaluate, { day: '2024-01-21', clientId: 'f1twoClient', total: 300, successful: 60, successfulHttp: 60 }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + clientId: 'f1oneClient', + total: 30, + successful: 1, + successfulHttp: 1 + }) + await givenClientRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + clientId: 'f1twoClient', + total: 300, + successful: 60, + successfulHttp: 60 + }) const res = await fetch( new URL( '/client/f1oneClient/retrieval-success-rate/summary?from=2024-01-10&to=2024-01-20', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ - { day: '2024-01-10', success_rate: 1 / 10, total: '10', successful: '1', successful_http: '1', success_rate_http: 1 / 10 }, - { day: '2024-01-20', success_rate: 1 / 20, total: '20', successful: '1', successful_http: '0', success_rate_http: 0 } + { + day: '2024-01-10', + success_rate: 1 / 10, + total: '10', + successful: '1', + successful_http: '1', + success_rate_http: 1 / 10 + }, + { + day: '2024-01-20', + success_rate: 1 / 20, + total: '20', + successful: '1', + successful_http: '0', + success_rate_http: 0 + } ]) }) }) describe('GET 
/allocators/retrieval-success-rate/summary', () => { beforeEach(async () => { - await pgPools.evaluate.query('DELETE FROM daily_allocator_retrieval_stats') + await pgPools.evaluate.query( + 'DELETE FROM daily_allocator_retrieval_stats' + ) }) it('returns a summary of allocators RSR for the given date range', async () => { // before the range - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-10', allocatorId: 'f1oneAllocator', total: 10, successful: 1, successfulHttp: 1 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-10', allocatorId: 'f1twoAllocator', total: 100, successful: 20, successfulHttp: 10 }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + allocatorId: 'f1oneAllocator', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + allocatorId: 'f1twoAllocator', + total: 100, + successful: 20, + successfulHttp: 10 + }) // in the range - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-11', allocatorId: 'f1oneAllocator', total: 20, successful: 1, successfulHttp: 0 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-11', allocatorId: 'f2twoAllocator', total: 200, successful: 150, successfulHttp: 100 }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + allocatorId: 'f1oneAllocator', + total: 20, + successful: 1, + successfulHttp: 0 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + allocatorId: 'f2twoAllocator', + total: 200, + successful: 150, + successfulHttp: 100 + }) // after the range - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-12', allocatorId: 'f1oneAllocator', total: 30, successful: 1, successfulHttp: 1 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-12', allocatorId: 'f2twoAllocator', total: 300, successful: 60, successfulHttp: 60 }) + await 
givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + allocatorId: 'f1oneAllocator', + total: 30, + successful: 1, + successfulHttp: 1 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-12', + allocatorId: 'f2twoAllocator', + total: 300, + successful: 60, + successfulHttp: 60 + }) const res = await fetch( new URL( '/allocators/retrieval-success-rate/summary?from=2024-01-11&to=2024-01-11', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { allocator_id: 'f1oneAllocator', success_rate: 0.05, total: '20', successful: '1', successful_http: '0', success_rate_http: 0 }, - { allocator_id: 'f2twoAllocator', success_rate: 0.75, total: '200', successful: '150', successful_http: '100', success_rate_http: 100 / 200 } + { + allocator_id: 'f1oneAllocator', + success_rate: 0.05, + total: '20', + successful: '1', + successful_http: '0', + success_rate_http: 0 + }, + { + allocator_id: 'f2twoAllocator', + success_rate: 0.75, + total: '200', + successful: '150', + successful_http: '100', + success_rate_http: 100 / 200 + } ]) }) it('handles total value being smaller or equal to 0', async () => { - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-11', allocatorId: 'f1oneAllocator', total: 0, successful: 0, successfulHttp: 0 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-11', allocatorId: 'f2twoAllocator', total: -1, successful: 0, successfulHttp: 0 }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + allocatorId: 'f1oneAllocator', + total: 0, + successful: 0, + successfulHttp: 0 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-11', + allocatorId: 'f2twoAllocator', + total: -1, + successful: 0, + successfulHttp: 0 + }) const res = await fetch( new URL( 'allocators/retrieval-success-rate/summary?from=2024-01-11&to=2024-01-11', baseUrl - ), { 
+ ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const stats = await res.json() assert.deepStrictEqual(stats, [ - { allocator_id: 'f1oneAllocator', success_rate: null, total: '0', successful: '0', successful_http: '0', success_rate_http: null }, - { allocator_id: 'f2twoAllocator', success_rate: null, total: '-1', successful: '0', successful_http: '0', success_rate_http: null } + { + allocator_id: 'f1oneAllocator', + success_rate: null, + total: '0', + successful: '0', + successful_http: '0', + success_rate_http: null + }, + { + allocator_id: 'f2twoAllocator', + success_rate: null, + total: '-1', + successful: '0', + successful_http: '0', + success_rate_http: null + } ]) }) }) describe('GET /allocator/{id}/retrieval-success-rate/summary', () => { beforeEach(async () => { - await pgPools.evaluate.query('DELETE FROM daily_allocator_retrieval_stats') + await pgPools.evaluate.query( + 'DELETE FROM daily_allocator_retrieval_stats' + ) }) it('lists daily retrieval stats summary for specified allocator in given date range', async () => { // before the range - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-09', allocatorId: 'f1oneAllocator', total: 10, successful: 1, successfulHttp: 1 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-09', allocatorId: 'f2twoAllocator', total: 100, successful: 20, successfulHttp: 10 }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-09', + allocatorId: 'f1oneAllocator', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-09', + allocatorId: 'f2twoAllocator', + total: 100, + successful: 20, + successfulHttp: 10 + }) // in the range - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-20', allocatorId: 'f1oneAllocator', total: 20, successful: 1, successfulHttp: 0 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-20', allocatorId: 
'f2twoAllocator', total: 200, successful: 60, successfulHttp: 50 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-10', allocatorId: 'f1oneAllocator', total: 10, successful: 1, successfulHttp: 1 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-10', allocatorId: 'f2twoAllocator', total: 100, successful: 50, successfulHttp: 35 }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + allocatorId: 'f1oneAllocator', + total: 20, + successful: 1, + successfulHttp: 0 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-20', + allocatorId: 'f2twoAllocator', + total: 200, + successful: 60, + successfulHttp: 50 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + allocatorId: 'f1oneAllocator', + total: 10, + successful: 1, + successfulHttp: 1 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-10', + allocatorId: 'f2twoAllocator', + total: 100, + successful: 50, + successfulHttp: 35 + }) // after the range - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-21', allocatorId: 'f1oneAllocator', total: 30, successful: 1, successfulHttp: 1 }) - await givenAllocatorRetrievalStats(pgPools.evaluate, { day: '2024-01-21', allocatorId: 'f2twoAllocator', total: 300, successful: 60, successfulHttp: 60 }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + allocatorId: 'f1oneAllocator', + total: 30, + successful: 1, + successfulHttp: 1 + }) + await givenAllocatorRetrievalStats(pgPools.evaluate, { + day: '2024-01-21', + allocatorId: 'f2twoAllocator', + total: 300, + successful: 60, + successfulHttp: 60 + }) const res = await fetch( new URL( '/allocator/f1oneAllocator/retrieval-success-rate/summary?from=2024-01-10&to=2024-01-20', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) - const stats = /** @type {{ day: string, success_rate: number }[]} */( + 
const stats = /** @type {{ day: string, success_rate: number }[]} */ ( await res.json() ) assert.deepStrictEqual(stats, [ - { day: '2024-01-10', success_rate: 1 / 10, total: '10', successful: '1', successful_http: '1', success_rate_http: 1 / 10 }, - { day: '2024-01-20', success_rate: 1 / 20, total: '20', successful: '1', successful_http: '0', success_rate_http: 0 } + { + day: '2024-01-10', + success_rate: 1 / 10, + total: '10', + successful: '1', + successful_http: '1', + success_rate_http: 1 / 10 + }, + { + day: '2024-01-20', + success_rate: 1 / 20, + total: '20', + successful: '1', + successful_http: '0', + success_rate_http: 0 + } ]) }) }) @@ -1021,10 +1925,20 @@ describe('HTTP request handler', () => { * @param {number | bigint} [data.successfulHttp] * @param {number | bigint} [data.successfulHttpHead] */ -const givenRetrievalStats = async (pgPool, { day, minerId, total, successful, successfulHttp, successfulHttpHead }) => { +const givenRetrievalStats = async ( + pgPool, + { day, minerId, total, successful, successfulHttp, successfulHttpHead } +) => { await pgPool.query( 'INSERT INTO retrieval_stats (day, miner_id, total, successful, successful_http, successful_http_head) VALUES ($1, $2, $3, $4, $5, $6)', - [day, minerId ?? 'f1test', total, successful, successfulHttp, successfulHttpHead] + [ + day, + minerId ?? 
'f1test', + total, + successful, + successfulHttp, + successfulHttpHead + ] ) } @@ -1038,7 +1952,10 @@ const givenRetrievalStats = async (pgPool, { day, minerId, total, successful, su * @param {number | bigint } data.successful * @param {number | bigint} [data.successfulHttp] */ -const givenClientRetrievalStats = async (pgPool, { day, clientId, total, successful, successfulHttp }) => { +const givenClientRetrievalStats = async ( + pgPool, + { day, clientId, total, successful, successfulHttp } +) => { await pgPool.query( 'INSERT INTO daily_client_retrieval_stats (day, client_id, total, successful, successful_http) VALUES ($1, $2, $3, $4, $5)', [day, clientId ?? 'f1ClientTest', total, successful, successfulHttp] @@ -1055,7 +1972,10 @@ const givenClientRetrievalStats = async (pgPool, { day, clientId, total, success * @param {number | bigint } data.successful * @param {number | bigint} [data.successfulHttp] */ -const givenAllocatorRetrievalStats = async (pgPool, { day, allocatorId, total, successful, successfulHttp }) => { +const givenAllocatorRetrievalStats = async ( + pgPool, + { day, allocatorId, total, successful, successfulHttp } +) => { await pgPool.query( 'INSERT INTO daily_allocator_retrieval_stats (day, allocator_id, total, successful, successful_http) VALUES ($1, $2, $3, $4, $5)', [day, allocatorId ?? 
'f1AllocatorTest', total, successful, successfulHttp] @@ -1077,17 +1997,20 @@ const givenAllocatorRetrievalStats = async (pgPool, { day, allocatorId, total, s * retrievable?: number; * }} stats */ -const givenDailyDealStats = async (pgPool, { - day, - minerId, - clientId, - tested, - indexMajorityFound, - indexed, - indexedHttp, - retrievalMajorityFound, - retrievable -}) => { +const givenDailyDealStats = async ( + pgPool, + { + day, + minerId, + clientId, + tested, + indexMajorityFound, + indexed, + indexedHttp, + retrievalMajorityFound, + retrievable + } +) => { indexed ??= tested indexedHttp ??= indexed indexMajorityFound ??= indexed @@ -1095,7 +2018,8 @@ const givenDailyDealStats = async (pgPool, { retrievable ??= tested retrievalMajorityFound ??= retrievable - await pgPool.query(` + await pgPool.query( + ` INSERT INTO daily_deals ( day, miner_id, @@ -1107,17 +2031,19 @@ const givenDailyDealStats = async (pgPool, { retrieval_majority_found, retrievable ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) - `, [ - day, - minerId ?? 'f1miner', - clientId ?? 'f1client', - tested, - indexMajorityFound, - indexed, - indexedHttp, - retrievalMajorityFound, - retrievable - ]) + `, + [ + day, + minerId ?? 'f1miner', + clientId ?? 'f1client', + tested, + indexMajorityFound, + indexed, + indexedHttp, + retrievalMajorityFound, + retrievable + ] + ) } /** @@ -1128,7 +2054,10 @@ const givenDailyDealStats = async (pgPool, { * @param {string} data.minerId * @param {number[]} data.timeToFirstByteP50 */ -const givenRetrievalTimings = async (pgPool, { day, minerId, timeToFirstByteP50 }) => { +const givenRetrievalTimings = async ( + pgPool, + { day, minerId, timeToFirstByteP50 } +) => { await pgPool.query( 'INSERT INTO retrieval_timings (day, miner_id, ttfb_p50) VALUES ($1, $2, $3)', [day, minerId ?? 
'f1test', timeToFirstByteP50] diff --git a/stats/test/platform-routes.test.js b/stats/test/platform-routes.test.js index d1542dd..2dc699a 100644 --- a/stats/test/platform-routes.test.js +++ b/stats/test/platform-routes.test.js @@ -3,8 +3,17 @@ import { getPgPools } from '@filecoin-station/spark-stats-db' import { assertResponseStatus } from './test-helpers.js' import { createApp } from '../lib/app.js' -import { getLocalDayAsISOString, today, yesterday } from '../lib/request-helpers.js' -import { givenDailyParticipants, givenDailyDesktopUsers } from '@filecoin-station/spark-stats-db/test-helpers.js' +import { + getLocalDayAsISOString, + today, + yesterday +} from '../lib/request-helpers.js' +import { + givenDailyParticipants, + givenDailyDesktopUsers, + givenScheduledRewards, + givenRewardTransfer +} from '@filecoin-station/spark-stats-db/test-helpers.js' describe('Platform Routes HTTP request handler', () => { /** @type {import('@filecoin-station/spark-stats-db').PgPools} */ @@ -20,9 +29,10 @@ describe('Platform Routes HTTP request handler', () => { SPARK_API_BASE_URL: 'https://api.filspark.com/', pgPools, logger: { - level: process.env.DEBUG === '*' || process.env.DEBUG?.includes('test') - ? 'debug' - : 'error' + level: + process.env.DEBUG === '*' || process.env.DEBUG?.includes('test') + ? 
'debug' + : 'error' } }) @@ -42,7 +52,9 @@ describe('Platform Routes HTTP request handler', () => { await pgPools.evaluate.query('DELETE FROM monthly_active_station_count') await pgPools.evaluate.query('DELETE FROM daily_platform_stats') - await pgPools.evaluate.query('REFRESH MATERIALIZED VIEW top_measurement_participants_yesterday_mv') + await pgPools.evaluate.query( + 'REFRESH MATERIALIZED VIEW top_measurement_participants_yesterday_mv' + ) await pgPools.stats.query('DELETE FROM daily_reward_transfers') await pgPools.stats.query('DELETE FROM daily_scheduled_rewards') @@ -59,10 +71,8 @@ describe('Platform Routes HTTP request handler', () => { ]) const res = await fetch( - new URL( - '/stations/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/stations/daily?from=2024-01-11&to=2024-01-12', baseUrl), + { redirect: 'manual' } ) @@ -86,10 +96,8 @@ describe('Platform Routes HTTP request handler', () => { await givenMonthlyActiveStationCount(pgPools.evaluate, '2024-03-01', 5) const res = await fetch( - new URL( - '/stations/monthly?from=2024-01-11&to=2024-02-11', - baseUrl - ), { + new URL('/stations/monthly?from=2024-01-11&to=2024-02-11', baseUrl), + { redirect: 'manual' } ) @@ -105,25 +113,47 @@ describe('Platform Routes HTTP request handler', () => { describe('GET /measurements/daily', () => { it('returns daily total accepted measurement count for the given date range', async () => { await givenDailyMeasurementsSummary(pgPools.evaluate, [ - { day: '2024-01-10', accepted_measurement_count: 5, total_measurement_count: 6 }, - { day: '2024-01-11', accepted_measurement_count: 1, total_measurement_count: 2 }, - { day: '2024-01-12', accepted_measurement_count: 3, total_measurement_count: 4 }, - { day: '2024-01-13', accepted_measurement_count: 7, total_measurement_count: 8 } + { + day: '2024-01-10', + accepted_measurement_count: 5, + total_measurement_count: 6 + }, + { + day: '2024-01-11', + accepted_measurement_count: 1, + total_measurement_count: 2 + }, + 
{ + day: '2024-01-12', + accepted_measurement_count: 3, + total_measurement_count: 4 + }, + { + day: '2024-01-13', + accepted_measurement_count: 7, + total_measurement_count: 8 + } ]) const res = await fetch( - new URL( - '/measurements/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/measurements/daily?from=2024-01-11&to=2024-01-12', baseUrl), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const metrics = await res.json() assert.deepStrictEqual(metrics, [ - { day: '2024-01-11', accepted_measurement_count: 1, total_measurement_count: 2 }, - { day: '2024-01-12', accepted_measurement_count: 3, total_measurement_count: 4 } + { + day: '2024-01-11', + accepted_measurement_count: 1, + total_measurement_count: 2 + }, + { + day: '2024-01-12', + accepted_measurement_count: 3, + total_measurement_count: 4 + } ]) }) }) @@ -139,7 +169,8 @@ describe('Platform Routes HTTP request handler', () => { (3, 'f1mnopqr') `) - await pgPools.evaluate.query(` + await pgPools.evaluate.query( + ` INSERT INTO recent_station_details (day, participant_id, station_id, accepted_measurement_count, total_measurement_count) VALUES ($1, 1, 'station1', 20, 25), ($1, 1, 'station2', 20, 25), @@ -147,9 +178,12 @@ describe('Platform Routes HTTP request handler', () => { ($1, 2, 'station4', 50, 55), ($1, 2, 'station5', 40, 45), ($1, 3, 'station6', 10, 15) - `, [day]) + `, + [day] + ) - await pgPools.evaluate.query(` + await pgPools.evaluate.query( + ` INSERT INTO recent_participant_subnets (day, participant_id, subnet) VALUES ($1, 1, 'subnet1'), ($1, 1, 'subnet2'), @@ -157,42 +191,49 @@ describe('Platform Routes HTTP request handler', () => { ($1, 2, 'subnet4'), ($1, 2, 'subnet5'), ($1, 3, 'subnet6') - `, [day]) + `, + [day] + ) // Refresh the materialized view - await pgPools.evaluate.query('REFRESH MATERIALIZED VIEW top_measurement_participants_yesterday_mv') + await pgPools.evaluate.query( + 'REFRESH MATERIALIZED VIEW top_measurement_participants_yesterday_mv' + ) 
const res = await fetch( new URL( '/participants/top-measurements?from=yesterday&to=yesterday', baseUrl - ), { + ), + { redirect: 'manual' } ) await assertResponseStatus(res, 200) const metrics = await res.json() - assert.deepStrictEqual(metrics, [{ - day, - participant_address: 'f1ghijkl', - inet_group_count: '2', - station_count: '2', - accepted_measurement_count: '90' - }, - { - day, - participant_address: 'f1abcdef', - inet_group_count: '3', - station_count: '3', - accepted_measurement_count: '50' - }, - { - day, - participant_address: 'f1mnopqr', - inet_group_count: '1', - station_count: '1', - accepted_measurement_count: '10' - }]) + assert.deepStrictEqual(metrics, [ + { + day, + participant_address: 'f1ghijkl', + inet_group_count: '2', + station_count: '2', + accepted_measurement_count: '90' + }, + { + day, + participant_address: 'f1abcdef', + inet_group_count: '3', + station_count: '3', + accepted_measurement_count: '50' + }, + { + day, + participant_address: 'f1mnopqr', + inet_group_count: '1', + station_count: '1', + accepted_measurement_count: '10' + } + ]) }) it('returns 400 if the date range is more than one day', async () => { @@ -200,7 +241,8 @@ describe('Platform Routes HTTP request handler', () => { new URL( '/participants/top-measurements?from=2024-01-11&to=2024-01-12', baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -208,27 +250,23 @@ describe('Platform Routes HTTP request handler', () => { }) }) + beforeEach(async () => { + await pgPools.stats.query('DELETE FROM daily_reward_transfers') + await pgPools.stats.query('DELETE FROM daily_scheduled_rewards') + await pgPools.stats.query('DELETE FROM participants') + }) + describe('GET /transfers/daily', () => { it('returns daily total Rewards sent for the given date range', async () => { - await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-10', [ - { toAddress: 'to1', amount: 100, lastCheckedBlock: 1 } - ]) - await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-11', [ - { 
toAddress: 'to2', amount: 150, lastCheckedBlock: 1 } - ]) - await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-12', [ - { toAddress: 'to2', amount: 300, lastCheckedBlock: 1 }, - { toAddress: 'to3', amount: 250, lastCheckedBlock: 1 } - ]) - await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-13', [ - { toAddress: 'to1', amount: 100, lastCheckedBlock: 1 } - ]) + await givenRewardTransfer(pgPools.stats, '2024-01-10', 'to1', 100, 1) + await givenRewardTransfer(pgPools.stats, '2024-01-11', 'to2', 150, 1) + await givenRewardTransfer(pgPools.stats, '2024-01-12', 'to2', 300, 1) + await givenRewardTransfer(pgPools.stats, '2024-01-12', 'to3', 250, 1) + await givenRewardTransfer(pgPools.stats, '2024-01-13', 'to1', 100, 1) const res = await fetch( - new URL( - '/transfers/daily?from=2024-01-11&to=2024-01-12', - baseUrl - ), { + new URL('/transfers/daily?from=2024-01-11&to=2024-01-12', baseUrl), + { redirect: 'manual' } ) @@ -263,10 +301,8 @@ describe('Platform Routes HTTP request handler', () => { }) it('returns 400 if the date range is more than 31 days', async () => { const res = await fetch( - new URL( - '/transfers/daily?from=2024-01-01&to=2024-02-02', - baseUrl - ), { + new URL('/transfers/daily?from=2024-01-01&to=2024-02-02', baseUrl), + { redirect: 'manual' } ) @@ -274,41 +310,51 @@ describe('Platform Routes HTTP request handler', () => { }) }) + beforeEach(async () => { + await pgPools.stats.query('DELETE FROM daily_reward_transfers') + await pgPools.stats.query('DELETE FROM daily_scheduled_rewards') + await pgPools.stats.query('DELETE FROM participants') + }) + describe('GET /participants/top-earning', () => { - const oneWeekAgo = getLocalDayAsISOString(new Date(Date.now() - 7 * 24 * 60 * 60 * 1000)) + const oneWeekAgo = getLocalDayAsISOString( + new Date(Date.now() - 7 * 24 * 60 * 60 * 1000) + ) const setupScheduledRewardsData = async () => { - await pgPools.stats.query(` - INSERT INTO daily_scheduled_rewards (day, participant_address, 
scheduled_rewards) - VALUES - ('${yesterday()}', 'address1', 10), - ('${yesterday()}', 'address2', 20), - ('${yesterday()}', 'address3', 30), - ('${today()}', 'address1', 15), - ('${today()}', 'address2', 25), - ('${today()}', 'address3', 35) - `) + await givenScheduledRewards( + pgPools.stats, + yesterday(), + new Map([ + ['address1', 10], + ['address2', 20], + ['address3', 30] + ]) + ) + await givenScheduledRewards( + pgPools.stats, + today(), + new Map([ + ['address1', 15], + ['address2', 25], + ['address3', 35] + ]) + ) } it('returns top earning participants for the given date range', async () => { // First two dates should be ignored - await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-09', [ - { toAddress: 'address1', amount: 100, lastCheckedBlock: 1 }, - { toAddress: 'address2', amount: 100, lastCheckedBlock: 1 }, - { toAddress: 'address3', amount: 100, lastCheckedBlock: 1 } - ]) - await givenDailyRewardTransferMetrics(pgPools.stats, '2024-01-10', [ - { toAddress: 'address1', amount: 100, lastCheckedBlock: 1 } - ]) + await givenRewardTransfer(pgPools.stats, '2024-01-09', 'address1', 100, 1) + await givenRewardTransfer(pgPools.stats, '2024-01-09', 'address2', 100, 1) + await givenRewardTransfer(pgPools.stats, '2024-01-09', 'address3', 100, 1) + + await givenRewardTransfer(pgPools.stats, '2024-01-10', 'address1', 100, 1) // These should be included in the results - await givenDailyRewardTransferMetrics(pgPools.stats, oneWeekAgo, [ - { toAddress: 'address2', amount: 150, lastCheckedBlock: 1 }, - { toAddress: 'address1', amount: 50, lastCheckedBlock: 1 } - ]) - await givenDailyRewardTransferMetrics(pgPools.stats, today(), [ - { toAddress: 'address3', amount: 200, lastCheckedBlock: 1 }, - { toAddress: 'address2', amount: 100, lastCheckedBlock: 1 } - ]) + await givenRewardTransfer(pgPools.stats, oneWeekAgo, 'address2', 150, 1) + await givenRewardTransfer(pgPools.stats, oneWeekAgo, 'address1', 50, 1) + + await givenRewardTransfer(pgPools.stats, today(), 
'address3', 200, 1) + await givenRewardTransfer(pgPools.stats, today(), 'address2', 100, 1) // Set up scheduled rewards data await setupScheduledRewardsData() @@ -317,7 +363,8 @@ describe('Platform Routes HTTP request handler', () => { new URL( `/participants/top-earning?from=${oneWeekAgo}&to=${today()}`, baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -332,15 +379,14 @@ describe('Platform Routes HTTP request handler', () => { it('returns top earning participants for the given date range with no existing reward transfers', async () => { await setupScheduledRewardsData() - await givenDailyRewardTransferMetrics(pgPools.stats, today(), [ - { toAddress: 'address1', amount: 100, lastCheckedBlock: 1 } - ]) + await givenRewardTransfer(pgPools.stats, today(), 'address1', 100, 1) const res = await fetch( new URL( `/participants/top-earning?from=${oneWeekAgo}&to=${today()}`, baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -357,7 +403,8 @@ describe('Platform Routes HTTP request handler', () => { new URL( `/participants/top-earning?from=${oneWeekAgo}&to=yesterday`, baseUrl - ), { + ), + { redirect: 'manual' } ) @@ -367,17 +414,15 @@ describe('Platform Routes HTTP request handler', () => { describe('GET /participants/summary', () => { it('counts participants', async () => { - await givenDailyParticipants( - pgPools.evaluate, - '2000-01-01', - ['0x1', '0x2', '0x3'] - ) + await givenDailyParticipants(pgPools.evaluate, '2000-01-01', [ + '0x1', + '0x2', + '0x3' + ]) - const res = await fetch( - new URL('/participants/summary', baseUrl), { - redirect: 'manual' - } - ) + const res = await fetch(new URL('/participants/summary', baseUrl), { + redirect: 'manual' + }) await assertResponseStatus(res, 200) const summary = await res.json() assert.deepStrictEqual(summary, { participant_count: 3 }) @@ -391,38 +436,37 @@ describe('Platform Routes HTTP request handler', () => { describe('GET /participants/accumulative/daily', () => { it('counts accumulative daily participants', async () => 
{ // 3 new participants, out of range - await givenDailyParticipants( - pgPools.evaluate, - '1999-01-01', - ['0x10', '0x20', '0x30'] - ) + await givenDailyParticipants(pgPools.evaluate, '1999-01-01', [ + '0x10', + '0x20', + '0x30' + ]) // 3 new participants, 1 old participant -> 6 - await givenDailyParticipants( - pgPools.evaluate, - '2000-01-01', - ['0x1', '0x2', '0x3', '0x10'] - ) + await givenDailyParticipants(pgPools.evaluate, '2000-01-01', [ + '0x1', + '0x2', + '0x3', + '0x10' + ]) // 0 new participants, 2 old participants - await givenDailyParticipants( - pgPools.evaluate, - '2000-01-02', - ['0x1', '0x2'] - ) + await givenDailyParticipants(pgPools.evaluate, '2000-01-02', [ + '0x1', + '0x2' + ]) // 1 new participant, 1 old participant -> 7 - await givenDailyParticipants( - pgPools.evaluate, - '2000-01-03', - ['0x1', '0x4'] - ) + await givenDailyParticipants(pgPools.evaluate, '2000-01-03', [ + '0x1', + '0x4' + ]) // 1 new participant, out of range - await givenDailyParticipants( - pgPools.evaluate, - '2000-01-04', - ['0x5'] - ) + await givenDailyParticipants(pgPools.evaluate, '2000-01-04', ['0x5']) const res = await fetch( - new URL('/participants/accumulative/daily?from=2000-01-01&to=2000-01-03', baseUrl), { + new URL( + '/participants/accumulative/daily?from=2000-01-01&to=2000-01-03', + baseUrl + ), + { redirect: 'manual' } ) @@ -442,31 +486,18 @@ describe('Platform Routes HTTP request handler', () => { describe('GET /stations/desktop/daily', () => { it('counts daily desktop users', async () => { // out of range - await givenDailyDesktopUsers( - pgPools.stats, - '1999-01-01', - 10 - ) + await givenDailyDesktopUsers(pgPools.stats, '1999-01-01', 10) // in range - await givenDailyDesktopUsers( - pgPools.stats, - '2000-01-01', - 30 - ) - await givenDailyDesktopUsers( - pgPools.stats, - '2000-01-03', - 20 - ) + await givenDailyDesktopUsers(pgPools.stats, '2000-01-01', 30) + await givenDailyDesktopUsers(pgPools.stats, '2000-01-03', 20) // out of range - await 
givenDailyDesktopUsers( - pgPools.stats, - '2000-01-04', - 10 - ) + await givenDailyDesktopUsers(pgPools.stats, '2000-01-04', 10) const res = await fetch( - new URL('/stations/desktop/daily?from=2000-01-01&to=2000-01-03', baseUrl) + new URL( + '/stations/desktop/daily?from=2000-01-01&to=2000-01-03', + baseUrl + ) ) await assertResponseStatus(res, 200) const daily = await res.json() @@ -479,7 +510,7 @@ describe('Platform Routes HTTP request handler', () => { }) const givenDailyMeasurementsSummary = async (pgPoolEvaluate, summaryData) => { - const processedSummaryData = summaryData.map(row => ({ + const processedSummaryData = summaryData.map((row) => ({ day: row.day, accepted_measurement_count: row.accepted_measurement_count ?? 100, total_measurement_count: row.total_measurement_count ?? 120, @@ -488,7 +519,8 @@ const givenDailyMeasurementsSummary = async (pgPoolEvaluate, summaryData) => { inet_group_count: row.inet_group_count ?? 8 })) - await pgPoolEvaluate.query(` + await pgPoolEvaluate.query( + ` INSERT INTO daily_platform_stats ( day, accepted_measurement_count, @@ -505,36 +537,29 @@ const givenDailyMeasurementsSummary = async (pgPoolEvaluate, summaryData) => { UNNEST($5::int[]) AS participant_address_count, UNNEST($6::int[]) AS inet_group_count ON CONFLICT DO NOTHING - `, [ - processedSummaryData.map(s => s.day), - processedSummaryData.map(s => s.accepted_measurement_count), - processedSummaryData.map(s => s.total_measurement_count), - processedSummaryData.map(s => s.station_count), - processedSummaryData.map(s => s.participant_address_count), - processedSummaryData.map(s => s.inet_group_count) - ]) + `, + [ + processedSummaryData.map((s) => s.day), + processedSummaryData.map((s) => s.accepted_measurement_count), + processedSummaryData.map((s) => s.total_measurement_count), + processedSummaryData.map((s) => s.station_count), + processedSummaryData.map((s) => s.participant_address_count), + processedSummaryData.map((s) => s.inet_group_count) + ] + ) } -const 
givenMonthlyActiveStationCount = async (pgPoolEvaluate, month, stationCount) => { - await pgPoolEvaluate.query(` +const givenMonthlyActiveStationCount = async ( + pgPoolEvaluate, + month, + stationCount +) => { + await pgPoolEvaluate.query( + ` INSERT INTO monthly_active_station_count (month, station_count) VALUES ($1, $2) ON CONFLICT DO NOTHING - `, [ - month, - stationCount - ]) -} - -const givenDailyRewardTransferMetrics = async (pgPoolStats, day, transferStats) => { - await pgPoolStats.query(` - INSERT INTO daily_reward_transfers (day, to_address, amount, last_checked_block) - SELECT $1 AS day, UNNEST($2::text[]) AS to_address, UNNEST($3::int[]) AS amount, UNNEST($4::int[]) AS last_checked_block - ON CONFLICT DO NOTHING - `, [ - day, - transferStats.map(s => s.toAddress), - transferStats.map(s => s.amount), - transferStats.map(s => s.lastCheckedBlock) - ]) + `, + [month, stationCount] + ) }