Use @fastify/fastify-postgres. CheckerNetwork/roadmap#220 #341
@@ -21,4 +21,4 @@
     "mocha"
   ]
 }
-}
+}

Reviewer: Suggested change
Reviewer: ping @Goddhi
@@ -6,4 +6,4 @@ import {

 const pgPools = await getPgPools()
 await migrateStatsDB(pgPools.stats)
-await migrateEvaluateDB(pgPools.evaluate)
+await migrateEvaluateDB(pgPools.evaluate)

Reviewer: please undo this change to keep the diff clean
Author: Alright, noted
Reviewer: Suggested change
@@ -1,23 +1,26 @@
 import '../lib/instrument.js'
 import { createApp } from '../lib/app.js'
 import { getPgPools } from '@filecoin-station/spark-stats-db'

 const {
   PORT = '8080',
   HOST = '127.0.0.1',
   SPARK_API_BASE_URL = 'https://api.filspark.com/',
-  REQUEST_LOGGING = 'true'
+  REQUEST_LOGGING = 'true',
+  DATABASE_URL,
+  EVALUATE_DB_URL
 } = process.env

 const pgPools = await getPgPools()

 const app = await createApp({
   SPARK_API_BASE_URL,
   pgPools,
+  DATABASE_URL,
+  EVALUATE_DB_URL,
   logger: {
     level: ['1', 'true'].includes(REQUEST_LOGGING) ? 'info' : 'error'
   }
 })

 console.log('Starting the http server on host %j port %s', HOST, PORT)
 const baseUrl = app.listen({ port: Number(PORT), host: HOST })
 console.log(baseUrl)

Reviewer (on the new EVALUATE_DB_URL line): Let's add default values for both DATABASE_URL and EVALUATE_DB_URL. Suggested change.
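The suggested default values themselves were not captured in the thread above, so the following is only a minimal sketch of what they could look like, assuming local-development Postgres connection strings; the database names (spark_stats, spark_evaluate) are placeholders, not something this PR specifies:

const {
  PORT = '8080',
  HOST = '127.0.0.1',
  SPARK_API_BASE_URL = 'https://api.filspark.com/',
  REQUEST_LOGGING = 'true',
  // Hypothetical defaults for local development; the actual values suggested
  // in the review are not shown in this thread.
  DATABASE_URL = 'postgres://localhost:5432/spark_stats',
  EVALUATE_DB_URL = 'postgres://localhost:5432/spark_evaluate'
} = process.env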
@@ -14,39 +14,39 @@ import { filterPreHandlerHook, filterOnSendHook } from './request-helpers.js'

 /** @typedef {import('./typings.js').RequestWithFilter} RequestWithFilter */

-export const addPlatformRoutes = (app, pgPools) => {
+export const addPlatformRoutes = (app) => {
   app.register(async app => {
     app.addHook('preHandler', filterPreHandlerHook)
     app.addHook('onSend', filterOnSendHook)

     app.get('/stations/daily', async (/** @type {RequestWithFilter} */ request, reply) => {
       reply.send(await fetchDailyStationCount(pgPools.evaluate, request.filter))
     })
     app.get('/stations/monthly', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchMonthlyStationCount(pgPools.evaluate, request.filter))
+      reply.send(await fetchMonthlyStationCount(request.server.pg, request.filter))
     })
     app.get('/stations/desktop/daily', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchDailyDesktopUsers(pgPools.stats, request.filter))
+      reply.send(await fetchDailyDesktopUsers(request.server.pg, request.filter))
     })
     app.get('/measurements/daily', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchDailyStationMeasurementCounts(pgPools.evaluate, request.filter))
+      reply.send(await fetchDailyStationMeasurementCounts(request.server.pg, request.filter))
     })
     app.get('/participants/top-measurements', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchParticipantsWithTopMeasurements(pgPools.evaluate, request.filter))
+      reply.send(await fetchParticipantsWithTopMeasurements(request.server.pg, request.filter))
     })
     app.get('/participants/top-earning', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchTopEarningParticipants(pgPools.stats, request.filter))
-    })
+      reply.send(await fetchTopEarningParticipants(request.server.pg, request.filter)) })

     app.get('/participants/accumulative/daily', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchAccumulativeDailyParticipantCount(pgPools.evaluate, request.filter))
+      reply.send(await fetchAccumulativeDailyParticipantCount(request.server.pg, request.filter))
     })
     app.get('/transfers/daily', async (/** @type {RequestWithFilter} */ request, reply) => {
-      reply.send(await fetchDailyRewardTransfers(pgPools.stats, request.filter))
+      reply.send(await fetchDailyRewardTransfers(request.server.pg, request.filter))
     })
   })

   app.get('/participants/summary', async (request, reply) => {
     reply.header('cache-control', `public, max-age=${24 * 3600 /* one day */}`)
-    reply.send(await fetchParticipantsSummary(pgPools.evaluate))
+    reply.send(await fetchParticipantsSummary(request.server.pg.evaluate))
   })
 }

Review thread on the /stations/monthly change:
Reviewer: We could also pass the correct instance of the database instead of passing the whole pg object. Suggested change.
Author: This throws an error, which is why I went with only the pg object; besides, the fetcher functions expect the full pg object and use pg.stats or pg.evaluate internally.
Reviewer: What error does it throw?
Reviewer: ping @Goddhi
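The routes above read request.server.pg, which is the decorator @fastify/postgres installs on the Fastify instance. The registration itself is not part of the hunks in this PR, so the following is only a sketch of how createApp might wire up two named pools from DATABASE_URL and EVALUATE_DB_URL; the connectionString and name options come from the @fastify/postgres documentation, while the function shape and the import path for the routes are assumptions:

import Fastify from 'fastify'
import fastifyPostgres from '@fastify/postgres'
import { addPlatformRoutes } from './platform-routes.js' // assumed path

export const createApp = async ({ DATABASE_URL, EVALUATE_DB_URL, logger }) => {
  // SPARK_API_BASE_URL and the other createApp options are omitted from this sketch.
  const app = Fastify({ logger })

  // Register one named pool per database; handlers can then reach them as
  // app.pg.stats and app.pg.evaluate (or request.server.pg.* inside a route).
  await app.register(fastifyPostgres, { connectionString: DATABASE_URL, name: 'stats' })
  await app.register(fastifyPostgres, { connectionString: EVALUATE_DB_URL, name: 'evaluate' })

  addPlatformRoutes(app)
  return app
}

With named registrations like these, request.server.pg.evaluate and request.server.pg.stats inside the route handlers resolve to the two pools, which is what the fetcher changes below rely on.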
@@ -1,16 +1,24 @@
 import assert from 'http-assert'
 import { today, yesterday } from './request-helpers.js'

-/** @typedef {import('@filecoin-station/spark-stats-db').Queryable} Queryable */
+/**
+  @typedef {import('./typings.js').DateRangeFilter} DateRangeFilter
+  @typedef {import('@filecoin-station/spark-stats-db').Queryable} Queryable
+  @typedef {import('./typings.js').FastifyPg} FastifyPg
+*/

+/**
+ * @param {FastifyPg} pg - Fastify pg object with database connections
+ */

Reviewer: Suggested change
Reviewer: ping @Goddhi

 const ONE_DAY = 24 * 60 * 60 * 1000

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchDailyStationCount = async (pgPool, filter) => {
-  const { rows } = await pgPool.query(`
+export const fetchDailyStationCount = async (pg, filter) => {
+  const { rows } = await pg.evaluate.query(`
     SELECT day::TEXT, station_count
     FROM daily_platform_stats
     WHERE day >= $1 AND day <= $2

Review thread on the fetchDailyStationCount change:
Reviewer: If we pass the correct database instance, we don't need to change this handler.
Reviewer: ping @Goddhi

@@ -20,11 +28,11 @@ export const fetchDailyStationCount = async (pgPool, filter) => {
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchMonthlyStationCount = async (pgPool, filter) => {
-  const { rows } = await pgPool.query(`
+export const fetchMonthlyStationCount = async (pg, filter) => {
+  const { rows } = await pg.evaluate.query(`
     SELECT month::TEXT, station_count
     FROM monthly_active_station_count
     WHERE

@@ -36,11 +44,11 @@ export const fetchMonthlyStationCount = async (pgPool, filter) => {
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchDailyStationMeasurementCounts = async (pgPool, filter) => {
-  const { rows } = await pgPool.query(`
+export const fetchDailyStationMeasurementCounts = async (pg, filter) => {
+  const { rows } = await pg.evaluate.query(`
     SELECT day::TEXT, accepted_measurement_count, total_measurement_count
     FROM daily_platform_stats
     WHERE day >= $1 AND day <= $2

@@ -50,31 +58,31 @@ export const fetchDailyStationMeasurementCounts = async (pgPool, filter) => {
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchParticipantsWithTopMeasurements = async (pgPool, filter) => {
+export const fetchParticipantsWithTopMeasurements = async (pg, filter) => {
   assert(filter.to === filter.from, 400, 'Multi-day queries are not supported for this endpoint')
   assert(filter.to === yesterday(), 400, 'filter.to must be set to yesterday, other values are not supported yet')
   // Ignore the filter for this query
   // Get the top measurement stations from the Materialized View
-  return (await pgPool.query(`
+  return (await pg.evaluate.query(`
     SELECT day::TEXT, participant_address, station_count, accepted_measurement_count, inet_group_count
     FROM top_measurement_participants_yesterday_mv
   `)).rows
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchDailyRewardTransfers = async (pgPool, filter) => {
+export const fetchDailyRewardTransfers = async (pg, filter) => {
   assert(
     new Date(filter.to).getTime() - new Date(filter.from).getTime() <= 31 * ONE_DAY,
     400,
     'Date range must be 31 days max'
   )
-  const { rows } = await pgPool.query(`
+  const { rows } = await pg.stats.query(`
     SELECT day::TEXT, to_address, amount
     FROM daily_reward_transfers
     WHERE day >= $1 AND day <= $2

@@ -99,11 +107,11 @@ export const fetchDailyRewardTransfers = async (pgPool, filter) => {
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchAccumulativeDailyParticipantCount = async (pgPool, filter) => {
-  const { rows } = await pgPool.query(`
+export const fetchAccumulativeDailyParticipantCount = async (pg, filter) => {
+  const { rows } = await pg.evaluate.query(`
     WITH first_appearance AS (
       SELECT participant_id, MIN(day) as day
       FROM daily_participants

@@ -126,15 +134,15 @@ export const fetchAccumulativeDailyParticipantCount = async (pgPool, filter) =>
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchTopEarningParticipants = async (pgPool, filter) => {
+export const fetchTopEarningParticipants = async (pg, filter) => {
   // The query combines "transfers until filter.to" with "latest scheduled rewards as of today".
   // As a result, it produces incorrect result if `to` is different from `now()`.
   // See https://github.com/filecoin-station/spark-stats/pull/170#discussion_r1664080395
   assert(filter.to === today(), 400, 'filter.to must be today, other values are not supported')
-  const { rows } = await pgPool.query(`
+  const { rows } = await pg.stats.query(`
     WITH latest_scheduled_rewards AS (
       SELECT DISTINCT ON (participant_address) participant_address, scheduled_rewards
       FROM daily_scheduled_rewards

@@ -154,10 +162,10 @@ export const fetchTopEarningParticipants = async (pgPool, filter) => {
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  */
-export const fetchParticipantsSummary = async (pgPool) => {
-  const { rows } = await pgPool.query(`
+export const fetchParticipantsSummary = async (pg) => {
+  const { rows } = await pg.evaluate.query(`
     SELECT COUNT(DISTINCT participant_id) FROM daily_participants
   `)
   return {

@@ -166,11 +174,11 @@ export const fetchParticipantsSummary = async (pgPool) => {
 }

 /**
- * @param {Queryable} pgPool
+ * @param {FastifyPg} pg
  * @param {import('./typings.js').DateRangeFilter} filter
  */
-export const fetchDailyDesktopUsers = async (pgPool, filter) => {
-  const { rows } = await pgPool.query(`
+export const fetchDailyDesktopUsers = async (pg, filter) => {
+  const { rows } = await pg.stats.query(`
     SELECT
       day::TEXT,
       user_count

@@ -180,4 +188,4 @@ export const fetchDailyDesktopUsers = async (pgPool, filter) => {
   [filter.from, filter.to])

   return rows
-}
+}
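The thread above on fetchDailyStationCount argues that the fetchers would not need to change at all if the routes passed the specific pool instead of the whole pg object. Neither the FastifyPg typedef from typings.js nor that alternative appears in this PR, so the following is a hedged sketch of both: an assumed shape for FastifyPg, and what the reviewer's approach might look like with the fetcher kept on its original Queryable signature.

/** @typedef {import('@filecoin-station/spark-stats-db').Queryable} Queryable */

// Assumed shape of the FastifyPg typedef referenced from typings.js:
/** @typedef {{ stats: Queryable, evaluate: Queryable }} FastifyPg */

// Reviewer's alternative (sketch): keep the fetcher bound to a single Queryable...
/**
 * @param {Queryable} pgPool
 * @param {import('./typings.js').DateRangeFilter} filter
 */
export const fetchDailyStationCount = async (pgPool, filter) => {
  const { rows } = await pgPool.query(`
    SELECT day::TEXT, station_count
    FROM daily_platform_stats
    WHERE day >= $1 AND day <= $2
    -- remainder of the original query elided
  `, [filter.from, filter.to])
  return rows
}

// ...and choose the pool at the call site inside addPlatformRoutes:
// reply.send(await fetchDailyStationCount(request.server.pg.evaluate, request.filter))

The trade-off is where the knowledge of "which database serves which query" lives: in the route definitions (reviewer's proposal, fetchers stay reusable against any Queryable) or inside the fetchers (this PR, routes stay uniform but every fetcher depends on the full pg decorator).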