Add tests for retrieval times
pyropy committed Jan 13, 2025
1 parent 5caeec1 commit 067e7d4
Showing 3 changed files with 77 additions and 18 deletions.
5 changes: 1 addition & 4 deletions stats/lib/handler.js
@@ -16,8 +16,7 @@ import {
   fetchDailyRetrievalResultCodes,
   fetchDailyMinerRSRSummary,
   fetchDailyRetrievalTimes,
-  fetchDailyMinerRetrievalTimes,
-  fetchRetrievalTimesSummary
+  fetchDailyMinerRetrievalTimes
 } from './stats-fetchers.js'
 
 import { handlePlatformRoutes } from './platform-routes.js'
@@ -111,8 +110,6 @@ const handler = async (req, res, pgPools, SPARK_API_BASE_URL) => {
     await respond(fetchMinersRSRSummary)
   } else if (req.method === 'GET' && url === '/retrieval-result-codes/daily') {
     await respond(fetchDailyRetrievalResultCodes)
-  } else if (req.method === 'GET' && url === '/retrieval-times/summary') {
-    await respond(fetchRetrievalTimesSummary)
   } else if (req.method === 'GET' && url === '/retrieval-times/daily') {
     await respond(fetchDailyRetrievalTimes)
   } else if (req.method === 'GET' && segs[0] === 'miner' && segs[1] && segs[2] === 'retrieval-times' && segs[3] === 'summary') {
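The hunk is folded before the body of that last branch. Presumably it forwards the miner id from the path to the per-miner fetcher, along these lines (a sketch based on how the other routes call `respond`; whether `respond` forwards extra arguments to the fetcher is an assumption):

    } else if (req.method === 'GET' && segs[0] === 'miner' && segs[1] && segs[2] === 'retrieval-times' && segs[3] === 'summary') {
      // segs[1] is the miner id from /miner/:id/retrieval-times/summary
      await respond(fetchDailyMinerRetrievalTimes, segs[1])
    }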
15 changes: 1 addition & 14 deletions stats/lib/stats-fetchers.js
@@ -291,19 +291,6 @@ export const fetchDailyRetrievalResultCodes = async (pgPools, filter) => {
   return stats
 }
 
-/**
- * Fetches global retrieval time statistics
- * @param {import('@filecoin-station/spark-stats-db').PgPools} pgPools
- * @param {import('./typings.js').DateRangeFilter} filter
- */
-export const fetchRetrievalTimesSummary = async (pgPools, filter) => {
-  const { rows } = await pgPools.evaluate.query(`
-    SELECT percentile_cont(0.5) WITHIN GROUP (ORDER BY time_to_first_byte_p50) AS ttfb_p50
-    FROM retrieval_times
-  `)
-  return rows
-}
-
 /**
  * Fetches daily global retrieval time statistics
  * @param {import('@filecoin-station/spark-stats-db').PgPools} pgPools
@@ -339,7 +326,7 @@ export const fetchDailyMinerRetrievalTimes = async (pgPools, { from, to }, minerId
       percentile_cont(0.5) WITHIN GROUP (ORDER BY time_to_first_byte_p50) AS ttfb_p50
     FROM retrieval_times
     WHERE miner_id = $1 AND day >= $2 AND day <= $3
-    GROUP BY day, miner_id 
+    GROUP BY day, miner_id
     ORDER BY day
   `, [
     minerId,
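The `/retrieval-times/daily` route exercised by the new tests calls `fetchDailyRetrievalTimes`, which sits above this hunk and is not shown. Judging from the per-miner variant and the shape the tests assert, it plausibly reads like this (a sketch, not the committed code):

    export const fetchDailyRetrievalTimes = async (pgPools, { from, to }) => {
      // Median TTFB per day across all miners in the date range
      const { rows } = await pgPools.evaluate.query(`
        SELECT day::TEXT,
          percentile_cont(0.5) WITHIN GROUP (ORDER BY time_to_first_byte_p50) AS ttfb_p50
        FROM retrieval_times
        WHERE day >= $1 AND day <= $2
        GROUP BY day
        ORDER BY day
      `, [from, to])
      return rows
    }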
75 changes: 75 additions & 0 deletions stats/test/handler.test.js
@@ -768,6 +768,65 @@ describe('HTTP request handler', () => {
])
})
})

describe('miner retrieval time stats', () => {
beforeEach(async () => {
// before the range
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1one', taskId: 'cidone::f1one::0', timeToFirstByteP50: 1000 })
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-09', minerId: 'f1two', taskId: 'cidone::f1two::0', timeToFirstByteP50: 1000 })
// in the range
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1one', taskId: 'cidone::f1one::1', timeToFirstByteP50: 1000 })
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-20', minerId: 'f1two', taskId: 'cidone::f1two::1', timeToFirstByteP50: 1000 })

await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', taskId: 'cidone::f1one::2', timeToFirstByteP50: 3000 })
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', taskId: 'cidone::f1two::2', timeToFirstByteP50: 3000 })
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1one', taskId: 'cidone::f1one::3', timeToFirstByteP50: 1000 })
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-10', minerId: 'f1two', taskId: 'cidone::f1two::3', timeToFirstByteP50: 1000 })
// after the range
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1one', taskId: 'cidone::f1one::4', timeToFirstByteP50: 1000 })
await givenRetrievalTimes(pgPools.evaluate, { day: '2024-01-21', minerId: 'f1two', taskId: 'cidone::f1two::4', timeToFirstByteP50: 1000 })
})

it('lists daily retrieval times in given date range', async () => {
const res = await fetch(
new URL(
'/retrieval-times/daily?from=2024-01-10&to=2024-01-20',
baseUrl
), {
redirect: 'manual'
}
)
await assertResponseStatus(res, 200)

      const stats = /** @type {{ day: string, ttfb_p50: number }[]} */(
await res.json()
)
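      // Expected medians: 2024-01-10 aggregates four fixture rows
      // (1000, 1000, 3000, 3000) across both miners, so
      // percentile_cont(0.5) interpolates to 2000; 2024-01-20 has
      // (1000, 1000), giving 1000.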
assert.deepStrictEqual(stats, [
{ day: '2024-01-10', ttfb_p50: 2000 },
{ day: '2024-01-20', ttfb_p50: 1000 }
])
})

it('lists daily retrieval times summary for specified miner in given date range', async () => {
const res = await fetch(
new URL(
'/miner/f1one/retrieval-times/summary?from=2024-01-10&to=2024-01-20',
baseUrl
), {
redirect: 'manual'
}
)
await assertResponseStatus(res, 200)

      const stats = /** @type {{ day: string, miner_id: string, ttfb_p50: number }[]} */(
await res.json()
)
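      // Only f1one's rows are aggregated: 2024-01-10 has samples
      // (3000, 1000), which percentile_cont(0.5) interpolates to 2000.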
assert.deepStrictEqual(stats, [
{ day: '2024-01-10', miner_id: 'f1one', ttfb_p50: 2000 },
{ day: '2024-01-20', miner_id: 'f1one', ttfb_p50: 1000 }
])
})
})
})

/**
@@ -844,3 +903,19 @@ const givenDailyDealStats = async (pgPool, {
retrievable
])
}

/**
 * Inserts a `retrieval_times` fixture row for the tests above.
 * @param {import('../lib/platform-stats-fetchers.js').Queryable} pgPool
 * @param {object} data
 * @param {string} data.day
 * @param {string} [data.minerId]
 * @param {string} [data.taskId]
 * @param {number} data.timeToFirstByteP50
 */
const givenRetrievalTimes = async (pgPool, { day, minerId, taskId, timeToFirstByteP50 }) => {
await pgPool.query(
'INSERT INTO retrieval_times (day, miner_id, task_id, time_to_first_byte_p50) VALUES ($1, $2, $3, $4)',
[day, minerId ?? 'f1test', taskId ?? 'cidone::f1test::0', timeToFirstByteP50]
)
}
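This commit does not include the `retrieval_times` schema. A minimal definition consistent with the insert above and with the `GROUP BY day, miner_id` queries would be (an assumption, not the committed migration):

    -- assumed schema; the real migration lives elsewhere in spark-stats-db
    CREATE TABLE retrieval_times (
      day DATE NOT NULL,
      miner_id TEXT NOT NULL,
      task_id TEXT NOT NULL,
      time_to_first_byte_p50 INT NOT NULL,
      PRIMARY KEY (day, miner_id, task_id)
    );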
