
Commit

metrics
kamilkisiela committed Dec 17, 2024
1 parent c4dbf2f commit 368a6e6
Showing 3 changed files with 42 additions and 19 deletions.
packages/services/tokens/src/api.ts (7 changes: 3 additions & 4 deletions)
@@ -5,7 +5,7 @@ import { z } from 'zod';
 import { createErrorHandler, handleTRPCError, maskToken, metrics } from '@hive/service-common';
 import type { inferRouterInputs, inferRouterOutputs } from '@trpc/server';
 import { initTRPC } from '@trpc/server';
-import { cacheHits, cacheMisses } from './metrics';
+import { recordTokenRead } from './metrics';
 import { Storage } from './multi-tier-storage';

 const httpRequests = new metrics.Counter({
@@ -170,13 +170,12 @@ export const tokensApiRouter = t.router({
       const cachedFailure = ctx.tokenReadFailuresCache.get(hash);

       if (cachedFailure) {
-        cacheHits.inc(1);
         throw new Error(cachedFailure);
       }

       try {
         const result = await ctx.storage.readToken(hash, alias);
-
+        recordTokenRead(result ? 200 : 404);
         // removes the token from the failures cache (in case the value expired)
         ctx.tokenReadFailuresCache.delete(hash);

@@ -187,8 +186,8 @@ export const tokensApiRouter = t.router({
         // set token read as failure
         // so we don't try to read it again for next X minutes
         ctx.tokenReadFailuresCache.set(hash, (error as Error).message);
-        cacheMisses.inc(1);

+        recordTokenRead(500);
         throw error;
       }
     }),
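
Note: assuming the metrics export from '@hive/service-common' wraps prom-client (the Counter options and .inc({ label }) calls in metrics.ts below match that API), each distinct status value of tokens_reads becomes its own time series. A minimal standalone sketch of what that looks like when scraped; the counter here is a hypothetical stand-in built on prom-client directly, not code from this commit:

import { Counter, register } from 'prom-client';

// Stand-in for the tokens_reads counter defined in metrics.ts (see below).
const tokenReads = new Counter({
  name: 'tokens_reads',
  help: 'Number of token reads',
  labelNames: ['status'],
});

tokenReads.inc({ status: 200 });
tokenReads.inc({ status: 404 });
tokenReads.inc({ status: 200 });

// The exposition output (register.metrics() is async in prom-client >= 14)
// then contains, among the HELP/TYPE lines:
//   tokens_reads{status="200"} 2
//   tokens_reads{status="404"} 1
void register.metrics().then(text => console.log(text));
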
packages/services/tokens/src/metrics.ts (34 changes: 25 additions & 9 deletions)
@@ -1,16 +1,32 @@
+import type { LRUCache } from 'lru-cache';
 import { metrics } from '@hive/service-common';

-export const cacheHits = new metrics.Counter({
-  name: 'tokens_cache_hits',
-  help: 'Number of cache hits',
+const tokenReads = new metrics.Counter({
+  name: 'tokens_reads',
+  help: 'Number of token reads',
+  labelNames: ['status'],
 });

-export const cacheMisses = new metrics.Counter({
-  name: 'tokens_cache_misses',
-  help: 'Number of cache misses',
+const cacheReads = new metrics.Counter({
+  name: 'tokens_cache_reads',
+  help: 'Number of cache reads',
+  labelNames: ['status'],
 });

-export const cacheInvalidations = new metrics.Counter({
-  name: 'tokens_cache_invalidations',
-  help: 'Number of cache invalidations',
+const cacheFills = new metrics.Counter({
+  name: 'tokens_cache_fills',
+  help: 'Number of cache fills',
+  labelNames: ['source'],
 });
+
+export function recordCacheRead(status: NonNullable<LRUCache.Status<unknown>['fetch']>) {
+  cacheReads.inc({ status });
+}
+
+export function recordCacheFill(source: 'db' | 'redis-fresh' | 'redis-stale') {
+  cacheFills.inc({ source });
+}
+
+export function recordTokenRead(status: 200 | 500 | 404) {
+  tokenReads.inc({ status });
+}
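
Note: the status accepted by recordCacheRead is lru-cache's fetch status, a small union of outcomes (per lru-cache's typings, values such as 'hit', 'miss', and 'stale'; the exact union may vary by version), so the label on tokens_cache_reads stays low-cardinality. A hedged usage sketch of the three helpers, with comments reflecting how the rest of this commit calls them:

import { recordCacheFill, recordCacheRead, recordTokenRead } from './metrics';

// How cache.fetch() satisfied the read: 'hit' for a fresh in-memory entry,
// 'miss' when the fetchMethod had to run, 'stale' when stale data was served.
recordCacheRead('hit');

// Which tier actually produced the value when the in-memory layer missed.
recordCacheFill('redis-fresh'); // fresh entry found in Redis
recordCacheFill('db');          // loaded from the database, then written back to Redis
recordCacheFill('redis-stale'); // stale Redis data returned as a fallback

// Outcome of the whole readToken call, expressed as an HTTP-like status.
recordTokenRead(200); // token found
recordTokenRead(404); // token not found
recordTokenRead(500); // read failed
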
packages/services/tokens/src/multi-tier-storage.ts (20 changes: 14 additions & 6 deletions)
@@ -5,7 +5,7 @@ import ms from 'ms';
 import { createConnectionString, createTokenStorage, Interceptor, tokens } from '@hive/storage';
 import { captureException, captureMessage } from '@sentry/node';
 import { atomic, until, useActionTracker } from './helpers';
-import { cacheHits, cacheInvalidations, cacheMisses } from './metrics';
+import { recordCacheFill, recordCacheRead } from './metrics';

 type CacheEntry = StorageItem | 'not-found';

@@ -106,6 +106,7 @@ export async function createStorage(
     }

     if (redisData) {
+      recordCacheFill('redis-fresh');
       logger.debug('Returning fresh data from Redis');
       return JSON.parse(redisData) as CacheEntry;
     }
@@ -114,6 +115,7 @@
     // Nothing in Redis, let's check the DB
     const dbResult = await db.getToken({ token: hashedToken });
     const cacheEntry = dbResult ? transformToken(dbResult) : 'not-found';
+    recordCacheFill('db');

     // Write to Redis, so the next time we can read it from there
     await setInRedis(redis, hashedToken, cacheEntry).catch(error => {
@@ -158,6 +160,7 @@
       throw error;
     }

+    recordCacheFill('redis-stale');
     logger.debug('Returning stale data from Redis');
     // Stale data will be cached in the in-memory cache only, as it's not fresh.
     return JSON.parse(staleRedisData) as CacheEntry;
@@ -189,12 +192,19 @@
       return tokens.map(transformToken);
     },
     async readToken(hashedToken, maskedToken) {
+      const status: LRUCache.Status<CacheEntry> = {};
+      const context = { maskedToken, source: 'in-memory' };
       const data = await cache.fetch(hashedToken, {
-        context: {
-          maskedToken,
-        },
+        context,
+        status,
       });

+      if (status.fetch) {
+        recordCacheRead(status.fetch);
+      } else {
+        serverLogger.warn('Status of the fetch is missing');
+      }
+
       if (!data) {
         // Looked up in all layers, and the token is not found
         return null;
@@ -252,8 +262,6 @@
       return;
     }

-    cacheInvalidations.inc();
-
     await redis.del(
       hashedTokens.map(generateRedisKey).concat(hashedTokens.map(generateStaleRedisKey)),
     );
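
Note: the readToken change relies on lru-cache's status reporting: cache.fetch() fills in a caller-supplied status object describing how the request was satisfied, and recordCacheRead(status.fetch) turns that into a labelled counter increment. A minimal standalone sketch of the mechanism, assuming lru-cache v10+ (where fetch, fetchMethod, and the Status type are available); the cache, key, and fetchMethod here are illustrative, not the ones from this commit:

import { LRUCache } from 'lru-cache';

const cache = new LRUCache<string, string>({
  max: 100,
  // Runs on a miss; in the real storage this is the Redis/DB lookup.
  fetchMethod: async key => `value-for-${key}`,
});

async function main() {
  const status: LRUCache.Status<string> = {};
  await cache.fetch('some-token-hash', { status });
  // 'miss' on the first read (fetchMethod ran), 'hit' on subsequent reads.
  console.log(status.fetch);
}

void main();
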
