Skip to content

Commit

Permalink
Merge pull request #319 from bcgov/ssoteam-396
Browse files Browse the repository at this point in the history
Fix: kc-cron-job hanging
  • Loading branch information
jlangy authored Oct 18, 2023
2 parents 00bc1fa + 2bd04eb commit 285040b
Show file tree
Hide file tree
Showing 8 changed files with 2,242 additions and 8 deletions.
26 changes: 26 additions & 0 deletions .github/workflows/unit-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
name: Run unit tests

# Run the kc-cron-job unit tests whenever the job code or this
# workflow definition changes.
on:
  push:
    paths:
      - "docker/kc-cron-job/**"
      - ".github/workflows/unit-test.yml"

jobs:
  unit_test:
    runs-on: ubuntu-20.04
    defaults:
      run:
        working-directory: ./docker/kc-cron-job
    steps:
      - uses: actions/checkout@v3
      # Matching docker version: Node 14 mirrors the kc-cron-job image.
      - name: Use Node.js 14
        uses: actions/setup-node@v3
        with:
          node-version: 14
      - name: unit test
        run: |
          yarn
          yarn test
2 changes: 2 additions & 0 deletions docker/kc-cron-job/.dockerignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
node_modules
npm-debug.log
__tests__
jest.config.js
15 changes: 15 additions & 0 deletions docker/kc-cron-job/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# About

This is the codebase for the [related helm chart](../../helm/kc-cron-job/).

## Zip Logs

The [zip logs](./zip-logs.js) job is designed to create zip files of the logs as they build up in Keycloak, and insert a copy into our database. Keycloak's [configuration](/docker/keycloak/configuration/standalone-openshift-7.6.xml#L68) periodically aggregates log files into dated files. The workflow for this job uses that to:

- zip log files from the same date together into one folder
- delete zipped logs older than the EXPIRY_LENGTH_DAYS variable
- insert logs into a database for longer storage

### Testing

Tests for this job can be run with `yarn test`.
70 changes: 70 additions & 0 deletions docker/kc-cron-job/__tests__/zip-logs.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
// Units under test, exported from event-logs.js.
const { saveFilesToDatabase, getDate, getClient } = require('../event-logs');
const fsPromises = require('fs').promises;
const path = require('path');
const { Client } = require('pg');

/**
 * Fixture log data exercising the malformed-line handling:
 * First log is an empty line
 * Second log is missing expected "sequence" parameter
 * Third line contains all expected fields
 */
const unexpectedLogs = `
{"@timestamp":"a","loggerClassName":"a","loggerName":"a","level":"a","message":"type=USER_INFO_REQUEST","threadName":"a","threadId":1,"mdc":{},"ndc":"","hostName":"a","processName":"a","processId":1,"@version":"1"}
{"@timestamp":"a","sequence":1001,"loggerClassName":"a","loggerName":"a","level":"a","message":"type=USER_INFO_REQUEST","threadName":"a","threadId":1,"mdc":{},"ndc":"","hostName":"a","processName":"a","processId":1,"@version":"1"}`;

// getDate(2) names a folder dated two days ago — presumably inside the
// job's SAVE_LOGS_N_DAYS_AGO window so the fixture gets picked up.
const fileDir = getDate(2);
const dir = path.join(__dirname, `fixtures/${fileDir}`);

// Replace the pg Client with one shared mock instance so DB queries
// issued by saveFilesToDatabase can be inspected without a real database.
jest.mock('pg', () => {
  const mockClient = {
    connect: jest.fn(),
    query: jest.fn(),
    end: jest.fn(),
  };
  return { Client: jest.fn(() => mockClient) };
});

/**
 * Create fixture log data in a dated directory that saveFilesToDatabase
 * will scan.
 * @param {string} fileData - raw log-file content to write
 */
const setupFixtures = async (fileData) => {
  await fsPromises.mkdir(dir, { recursive: true });
  // Pass segments separately — path.join with one interpolated argument
  // is a no-op, which defeats the point of using path.join.
  await fsPromises.writeFile(path.join(dir, 'test'), fileData);
};

// Remove the fixture directory and its contents. Uses fs.rm: the
// recursive form of fs.rmdir is deprecated and rmdir ignores `force`,
// so the original could throw if the directory was never created.
const clearFiles = () => fsPromises.rm(dir, { force: true, recursive: true });

// Verifies that malformed log lines are skipped with an informational
// message while the remaining valid lines are still inserted into the DB.
describe('Save Files to Database', () => {
  let client;
  beforeEach(() => {
    // jest.mock('pg') above makes every `new Client()` return the same
    // mock object, so this handle sees the queries made inside the job.
    client = new Client();
  });

  afterAll(async () => {
    await clearFiles();
  });

  it('Logs an info message if could not parse a line, and continues on to remaining logs', async () => {
    console.info = jest.fn();

    await setupFixtures(unexpectedLogs);
    await saveFilesToDatabase('__tests__/fixtures');

    // Empty line in logs should log a message indicating could not be parsed
    const jsonParseError = console.info.mock.calls.find((call) =>
      call[0].startsWith('Error trying to JSON parse line'),
    );
    expect(jsonParseError).not.toBe(undefined);

    // Log with missing sequence should log a message indicating could not be uploaded
    const unexpectedFormatError = console.info.mock.calls.find((call) =>
      call[0].startsWith('Log does not have expected format'),
    );
    expect(unexpectedFormatError).not.toBe(undefined);

    // Final valid log is still sent to DB insert command (log with sequence number 1001)
    const queries = client.query.mock.calls;
    const insertionQuery = queries.find((query) => query[0].startsWith('INSERT INTO sso_logs'));
    expect(insertionQuery[0].includes('1001')).toBe(true);
  });
});
50 changes: 44 additions & 6 deletions docker/kc-cron-job/event-logs.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,23 @@ const LOG_BATCH_SIZE = process.env.LOG_BATCH_SIZE || 1000;
const RETENTION_PERIOD_DAYS = process.env.RETENTION_PERIOD_DAYS || 30;
const SAVE_LOGS_N_DAYS_AGO = process.env.SAVE_LOGS_N_DAYS_AGO || 2;

// Exact set of top-level keys a Keycloak JSON log line must carry to be
// inserted; formatLog rejects lines with missing or extra keys. Note
// 'timestamp'/'version' here correspond to the raw '@timestamp'/'@version'
// keys, which formatLog strips before validating.
const logFields = [
  'sequence',
  'loggerClassName',
  'loggerName',
  'level',
  'message',
  'threadName',
  'threadId',
  'mdc',
  'ndc',
  'hostName',
  'processName',
  'processId',
  'timestamp',
  'version',
];

const getQuery = (logs) => {
const query = format(
`INSERT INTO sso_logs (
Expand Down Expand Up @@ -42,9 +59,16 @@ const saveLogsForFile = async (lineReader, client) => {
let i = 0;
let logs = [];
lineReader.on('line', async function (line) {
try {
// If cannot format or parse the log, move to next log without incrementing the batch size.
const formattedLog = formatLog(JSON.parse(line));
if (!formattedLog) return;
logs.push(formattedLog);
} catch (e) {
return console.info('Error trying to JSON parse line', e, line);
}

i++;
const formattedLog = formatLog(JSON.parse(line));
logs.push(formattedLog);
if (i === LOG_BATCH_SIZE) {
i = 0;
const queryLogs = [...logs];
Expand All @@ -55,8 +79,13 @@ const saveLogsForFile = async (lineReader, client) => {

return new Promise((resolve, reject) => {
lineReader.on('close', async () => {
await client.query(getQuery(logs));
resolve();
try {
await client.query(getQuery(logs));
resolve();
} catch (err) {
console.info('Error when inserting log data', err, logs);
reject(err);
}
});
});
};
Expand Down Expand Up @@ -94,6 +123,15 @@ const formatLog = (log) => {
delete log['@timestamp'];
delete log['@version'];
try {
const logKeys = Object.keys(log);
const logHasAllExpectedFields = logFields.every((field) => logKeys.includes(field));
const logHasUnexpectedFields = logKeys.some((key) => !logFields.includes(key));

if (!logHasAllExpectedFields || logHasUnexpectedFields) {
console.info('Log does not have expected format: ', log);
return null;
}

let { message } = log;
const json = {};
const fields = message.split(', ');
Expand All @@ -103,7 +141,7 @@ const formatLog = (log) => {
}
return Object.values({ ...log, message: json, namespace: process.env.NAMESPACE });
} catch (e) {
console.log('failed', message);
console.log('failed to format log', log);
return [];
}
};
Expand Down Expand Up @@ -171,4 +209,4 @@ async function saveFilesToDatabase(dirname) {
}
}

module.exports = { saveFilesToDatabase };
module.exports = { saveFilesToDatabase, getDate };
3 changes: 3 additions & 0 deletions docker/kc-cron-job/jest.config.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Jest configuration for the kc-cron-job unit tests; tests are resolved
// relative to this directory (docker/kc-cron-job).
module.exports = {
  rootDir: '.',
};
6 changes: 4 additions & 2 deletions docker/kc-cron-job/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
"author": "First Last <[email protected]>",
"main": "server.js",
"scripts": {
"start": "node server.js"
"start": "node server.js",
"test": "jest"
},
"dependencies": {
"archiver": "^5.3.0",
Expand All @@ -20,6 +21,7 @@
"xml2js": "^0.6.0"
},
"devDependencies": {
"dotenv": "^16.3.1"
"dotenv": "^16.3.1",
"jest": "^29.7.0"
}
}
Loading

0 comments on commit 285040b

Please sign in to comment.