Skip to content

Commit

Permalink
Add initial tests.
Browse files Browse the repository at this point in the history
  • Loading branch information
dlongley committed Nov 11, 2024
1 parent 98c852d commit eaa7d68
Show file tree
Hide file tree
Showing 4 changed files with 208 additions and 17 deletions.
16 changes: 6 additions & 10 deletions lib/cache.js
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ bedrock.events.on('bedrock-mongodb.ready', async () => {
}];

// only create TTL expiration records if configured to do so
const {autoRemoveExpiredRecords} = bedrock.config.tokenizedCache;
const {autoRemoveExpiredRecords} = bedrock.config['tokenized-cache'];
if(autoRemoveExpiredRecords) {
indexes.push({
// automatically expire entries using `expires` date field
Expand Down Expand Up @@ -121,15 +121,16 @@ export async function get({id, tokenizedId, tokenizer, explain = false} = {}) {

const query = {'entry.tokenizedId': tokenizedId};
const collection = database.collections[COLLECTION_NAME];
const projection = {_id: 0};

if(explain) {
// 'find().limit(1)' is used here because 'findOne()' doesn't return a
// cursor which allows the use of the explain function.
const cursor = await collection.find(query).limit(1);
const cursor = await collection.find(query, {projection}).limit(1);
return cursor.explain('executionStats');
}

let record = await collection.findOne(query);
let record = await collection.findOne(query, {projection});
if(record) {
// explicitly check `expires` against current time to handle cases where
// the database record just hasn't been expunged yet
Expand Down Expand Up @@ -229,13 +230,8 @@ export async function upsert({
}

// this upsert cannot trigger duplicate error; no try/catch needed
const result = await collection.updateOne(query, update, upsertOptions);
if(result.result.upserted) {
// return full record when upserted
return {_id: result.result.upserted[0]._id, ...record};
}
// return true/false on update
return result.result.n !== 0;
await collection.updateOne(query, update, upsertOptions);
return record;
}

/**
Expand Down
7 changes: 0 additions & 7 deletions test/mocha/10-api.js

This file was deleted.

174 changes: 174 additions & 0 deletions test/mocha/10-cache.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
/*!
* Copyright (c) 2020-2024 Digital Bazaar, Inc. All rights reserved.
*/
import {cleanDB, insertRecord} from './helpers.js';
import {mockEntryRecord1, mockEntryRecord2} from './mock.data.js';
import {cache} from '@bedrock/tokenized-cache';
import crypto from 'node:crypto';
import {tokenizers} from '@bedrock/tokenizer';

/**
 * Unit tests for the tokenized-cache public API (`cache.upsert()`,
 * `cache.get()`) and the internal `cache._hmacString()` helper.
 */
describe('Cache', function() {
  describe('cache.upsert()', () => {
    it('should insert and get a cache entry', async () => {
      const id = crypto.randomUUID();
      const record1 = await cache.upsert({
        id,
        value: {},
        ttl: 30000
      });
      const record2 = await cache.get({id});
      // `get()` must return the same record `upsert()` produced
      record1.should.eql(record2);
    });

    it('should replace an existing cache entry', async () => {
      const id = crypto.randomUUID();
      const record1 = await cache.upsert({
        id,
        value: {},
        ttl: 30000
      });
      // a second upsert with the same `id` but a new TTL replaces the entry
      const record2 = await cache.upsert({
        id,
        value: {},
        ttl: 40000
      });
      const record3 = await cache.get({id});
      record1.should.not.eql(record2);
      record2.should.eql(record3);
    });

    it('should error when no "id" is passed', async () => {
      let err;
      try {
        await cache.upsert();
      } catch(e) {
        err = e;
      }
      err.message.should.include('Either "id" or "tokenizedId"');
    });
  });

  describe('cache.get()', () => {
    it('should error when no "id" is passed', async () => {
      let err;
      try {
        await cache.get();
      } catch(e) {
        err = e;
      }
      err.message.should.include('Either "id" or "tokenizedId"');
    });
  });

  describe('cache._hmacString()', () => {
    let hmac;
    before(async () => {
      // use the current tokenizer's HMAC interface for all hashing tests
      ({hmac} = await tokenizers.getCurrent());
    });

    it('should produce a 34 byte Buffer given a small value', async () => {
      let result;
      let error;
      const value = '670dbcb1-164a-4d47-8d54-e3e89f5831f9';
      try {
        result = await cache._hmacString({hmac, value});
      } catch(e) {
        error = e;
      }
      assertNoError(error);
      result.should.be.instanceOf(Buffer);
      result.should.have.length(34);
    });

    it('should produce a 34 byte Buffer given a large value', async () => {
      let result;
      let error;
      // 8KiB hex string; output length must not depend on input size
      const value = crypto.randomBytes(4096).toString('hex');
      try {
        result = await cache._hmacString({hmac, value});
      } catch(e) {
        error = e;
      }
      assertNoError(error);
      result.should.be.instanceOf(Buffer);
      result.should.have.length(34);
    });

    it('should produce the same output given the same value', async () => {
      let result1;
      let error;
      const value = '294c9caa-707a-4758-ae5c-fe7306c25cc2';
      try {
        result1 = await cache._hmacString({hmac, value});
      } catch(e) {
        error = e;
      }
      assertNoError(error);

      let result2;
      error = undefined;
      try {
        result2 = await cache._hmacString({hmac, value});
      } catch(e) {
        error = e;
      }
      assertNoError(error);

      // HMAC must be deterministic for a fixed key and value
      result1.should.eql(result2);
    });

    it('should produce different output given different values', async () => {
      let result1;
      let error;
      try {
        result1 = await cache._hmacString({
          hmac,
          value: '294c9caa-707a-4758-ae5c-fe7306c25cc2'
        });
      } catch(e) {
        error = e;
      }
      assertNoError(error);

      let result2;
      error = undefined;
      try {
        result2 = await cache._hmacString({
          hmac,
          value: '0e26c923-84e6-4918-9337-f82c56951007'
        });
      } catch(e) {
        error = e;
      }
      assertNoError(error);

      result1.should.not.eql(result2);
    });
  });
});

/**
 * Database-level tests: verify that `cache.get()` queries are served by the
 * `entry.tokenizedId` index rather than a collection scan.
 */
describe('Cache Entry Database Tests', function() {
  describe('Indexes', function() {
    const collectionName = 'tokenized-cache-entry';

    beforeEach(async () => {
      await cleanDB({collectionName});
      // two records are inserted so the `nReturned`, `totalKeysExamined`,
      // and `totalDocsExamined` assertions below can distinguish an index
      // lookup from a scan over the whole collection
      await insertRecord({record: mockEntryRecord1, collectionName});
      await insertRecord({record: mockEntryRecord2, collectionName});
    });

    it(`is properly indexed for query of 'entry.tokenizedId' in get()`,
      async function() {
        const {tokenizedId} = mockEntryRecord1.entry;
        const {executionStats} = await cache.get({tokenizedId, explain: true});
        // exactly one document matched, via exactly one index key
        executionStats.nReturned.should.equal(1);
        executionStats.totalKeysExamined.should.equal(1);
        executionStats.totalDocsExamined.should.equal(1);
        const scanStage =
          executionStats.executionStages.inputStage.inputStage.inputStage;
        scanStage.stage.should.equal('IXSCAN');
        scanStage.keyPattern.should.eql({'entry.tokenizedId': 1});
      });
  });
});
28 changes: 28 additions & 0 deletions test/mocha/mock.data.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,31 @@ for(const product of products) {
mockData.productIdMap.set(product.id, product);
mockData.productIdMap.set(product.name, product);
}

// shared timestamps for the mock cache-entry records
const now = Date.now();
const tomorrow = new Date();
tomorrow.setDate(tomorrow.getDate() + 1);

// mock cache-entry record that expires one day from now
export const mockEntryRecord1 = {
  meta: {
    created: now,
    updated: now
  },
  entry: {
    tokenizedId: Buffer.from('43f14128-3b42-11ec-8d3d-0242ac130003'),
    value: {},
    expires: tomorrow
  }
};

// mock cache-entry record that expires in 3 seconds; the payload lives under
// `entry` (not `registration`, which was a copy-paste slip) so this record is
// covered by the `entry.tokenizedId` index like `mockEntryRecord1` — the
// index tests rely on both records being indexed the same way
export const mockEntryRecord2 = {
  meta: {
    created: now,
    updated: now
  },
  entry: {
    tokenizedId: Buffer.from('448de567-5e19-4a54-8b0e-1d0e2128f13d'),
    value: {},
    expires: new Date(now + 3000)
  }
};

0 comments on commit eaa7d68

Please sign in to comment.