Skip to content

Commit

Permalink
feat: cron job to remove vc users from std and parent realms (#320)
Browse files Browse the repository at this point in the history
* feat: cron job to remove vc users from std and parent realms

* feat: pass kc urls through env vars

* feat: use node 18

* feat: fixed unit tests

* feat: fixed eslint issues

* feat: cleaned up helm values and templates
  • Loading branch information
NithinKuruba authored Oct 27, 2023
1 parent 285040b commit ffe1612
Show file tree
Hide file tree
Showing 24 changed files with 1,917 additions and 360 deletions.
26 changes: 13 additions & 13 deletions .github/workflows/unit-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,8 @@ name: Run unit tests
on:
push:
paths:
- "docker/kc-cron-job/**"
- ".github/workflows/unit-test.yml"
- 'docker/kc-cron-job/**'
- '.github/workflows/unit-test.yml'

jobs:
unit_test:
Expand All @@ -13,14 +13,14 @@ jobs:
run:
working-directory: ./docker/kc-cron-job
steps:
# Matching docker version
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 14
- name: unit test
run: |
ls
yarn
yarn test
# Matching docker version
- uses: actions/checkout@v3
- name: Use Node.js 14
uses: actions/setup-node@v3
with:
node-version: 18
- name: unit test
run: |
ls
yarn
yarn test
2 changes: 2 additions & 0 deletions docker/kc-cron-job/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -20,3 +20,5 @@ PGDATABASE=
CSS_API_URL=http://localhost:8080/app
CSS_API_AUTH_SECRET=
RC_WEBHOOK=
VC_USERS_RETENTION_DAYS=
INACTIVE_IDIR_USERS_RETENTION_DAYS=
28 changes: 28 additions & 0 deletions docker/kc-cron-job/.eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
module.exports = {
env: {
browser: true,
commonjs: true,
es2021: true
},
extends: 'standard',
overrides: [
{
env: {
node: true
},
files: ['.eslintrc.{js,cjs}'],
parserOptions: {
sourceType: 'script'
}
}
],
parserOptions: {
ecmaVersion: 'latest'
},
rules: {
'space-before-function-paren': [0],
'spaced-comment': [0],
semi: [0],
'array-callback-return': [0]
}
};
12 changes: 12 additions & 0 deletions docker/kc-cron-job/.prettierrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"singleQuote": true,
"printWidth": 120,
"tabWidth": 2,
"useTabs": false,
"semi": true,
"quoteProps": "as-needed",
"trailingComma": "none",
"bracketSpacing": true,
"arrowParens": "always",
"jsxSingleQuote": false
}
2 changes: 1 addition & 1 deletion docker/kc-cron-job/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
FROM node:14
FROM node:18

WORKDIR /usr/src/app

Expand Down
12 changes: 6 additions & 6 deletions docker/kc-cron-job/__tests__/zip-logs.test.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
const { saveFilesToDatabase, getDate, getClient } = require('../event-logs');
const { saveFilesToDatabase, getDate } = require('../event-logs');
const { getPgClient } = require('../helpers.js');
const fsPromises = require('fs').promises;
const path = require('path');
const { Client } = require('pg');

/**
* First log is an empty line
Expand All @@ -19,7 +19,7 @@ jest.mock('pg', () => {
const mockClient = {
connect: jest.fn(),
query: jest.fn(),
end: jest.fn(),
end: jest.fn()
};
return { Client: jest.fn(() => mockClient) };
});
Expand All @@ -37,7 +37,7 @@ const clearFiles = () => fsPromises.rmdir(dir, { force: true, recursive: true })
describe('Save Files to Database', () => {
let client;
beforeEach(() => {
client = new Client();
client = getPgClient();
});

afterAll(async () => {
Expand All @@ -52,13 +52,13 @@ describe('Save Files to Database', () => {

// Empty line in logs should log a message indicating could not be parsed
const jsonParseError = console.info.mock.calls.find((call) =>
call[0].startsWith('Error trying to JSON parse line'),
call[0].startsWith('Error trying to JSON parse line')
);
expect(jsonParseError).not.toBe(undefined);

// Log with missing sequence should log a message indicating could not be uploaded
const unexpectedFormatError = console.info.mock.calls.find((call) =>
call[0].startsWith('Log does not have expected format'),
call[0].startsWith('Log does not have expected format')
);
expect(unexpectedFormatError).not.toBe(undefined);

Expand Down
29 changes: 8 additions & 21 deletions docker/kc-cron-job/active-sessions.js
Original file line number Diff line number Diff line change
@@ -1,60 +1,47 @@
const _ = require('lodash');
const { Client } = require('pg');
const format = require('pg-format');
const { getPgClient } = require('./helpers');
const KcAdminClient = require('keycloak-admin').default;

const KEYCLOAK_URL = process.env.KEYCLOAK_URL || 'https://dev.oidc.gov.bc.ca';
const KEYCLOAK_CLIENT_ID = process.env.KEYCLOAK_CLIENT_ID || 'script-cli';
const KEYCLOAK_CLIENT_SECRET = process.env.KEYCLOAK_CLIENT_SECRET;
const PGHOST = process.env.PGHOST;
const PGPORT = process.env.PGPORT || '5432';
const PGUSER = process.env.PGUSER;
const PGPASSWORD = process.env.PGPASSWORD;
const PGDATABASE = process.env.PGDATABASE;

const kcAdminClient = new KcAdminClient({
baseUrl: `${KEYCLOAK_URL}/auth`,
realmName: 'master',
realmName: 'master'
});

async function main() {
try {
await kcAdminClient.auth({
grantType: 'client_credentials',
clientId: KEYCLOAK_CLIENT_ID,
clientSecret: KEYCLOAK_CLIENT_SECRET,
clientSecret: KEYCLOAK_CLIENT_SECRET
});

// see https://node-postgres.com/api/client#new-clientconfig-object
const client = new Client({
host: PGHOST,
port: parseInt(PGPORT),
user: PGUSER,
password: PGPASSWORD,
database: PGDATABASE,
ssl: { rejectUnauthorized: false },
});
const client = getPgClient();

const realms = await kcAdminClient.realms.find({});
const dataset = [];
await Promise.all(
realms.map(async (realm) => {
const sessions = await kcAdminClient.sessions.find({
realm: realm.realm,
realm: realm.realm
});
sessions.map((session) => {
const sessionActiveCount = parseInt(session.active);
const sessionActiveCount = parseInt(session.active, 10);
const sessionClientID = session.clientId;
if (sessionActiveCount > 0) {
dataset.push([KEYCLOAK_URL, realm.realm, sessionClientID, sessionActiveCount]);
}
});
}),
})
);

const query = format(
'INSERT INTO active_sessions (keycloak_url, realm, client_id, session_count) VALUES %L',
dataset,
dataset
);

await client.connect();
Expand Down
58 changes: 12 additions & 46 deletions docker/kc-cron-job/event-logs.js
Original file line number Diff line number Diff line change
@@ -1,15 +1,9 @@
const _ = require('lodash');
const { Client } = require('pg');
const format = require('pg-format');
const fsPromises = require('fs').promises;
const fs = require('fs');
const readline = require('readline');
const { deleteLegacyData, getPgClient } = require('./helpers');

const PGHOST = process.env.PGHOST || 'localhost';
const PGPORT = process.env.PGPORT || '5432';
const PGUSER = process.env.PGUSER || 'postgres';
const PGPASSWORD = process.env.PGPASSWORD || 'postgres';
const PGDATABASE = process.env.PGDATABASE || 'postgres';
const LOG_BATCH_SIZE = process.env.LOG_BATCH_SIZE || 1000;
const RETENTION_PERIOD_DAYS = process.env.RETENTION_PERIOD_DAYS || 30;
const SAVE_LOGS_N_DAYS_AGO = process.env.SAVE_LOGS_N_DAYS_AGO || 2;
Expand All @@ -28,7 +22,7 @@ const logFields = [
'processName',
'processId',
'timestamp',
'version',
'version'
];

const getQuery = (logs) => {
Expand All @@ -50,7 +44,7 @@ const getQuery = (logs) => {
version,
namespace
) VALUES %L`,
logs,
logs
);
return query;
};
Expand Down Expand Up @@ -95,7 +89,7 @@ const reduceDataFromFiles = async (dirname) => {
let client;

try {
client = getClient();
client = getPgClient();
await client.connect();
if (!fs.existsSync(dirname)) {
console.info(`Directory ${dirname} does not exist.`);
Expand All @@ -105,7 +99,7 @@ const reduceDataFromFiles = async (dirname) => {
const files = await fsPromises.readdir(dirname);
for (const filename of files) {
const lineReader = readline.createInterface({
input: fs.createReadStream(`${dirname}/${filename}`)
input: fs.createReadStream(`${dirname}/${filename}`)
});
promises.push(saveLogsForFile(lineReader, client));
}
Expand All @@ -118,8 +112,8 @@ const reduceDataFromFiles = async (dirname) => {
};

const formatLog = (log) => {
log['timestamp'] = log['@timestamp'];
log['version'] = log['@version'];
log.timestamp = log['@timestamp'];
log.version = log['@version'];
delete log['@timestamp'];
delete log['@version'];
try {
Expand All @@ -132,10 +126,10 @@ const formatLog = (log) => {
return null;
}

let { message } = log;
const { message } = log;
const json = {};
const fields = message.split(', ');
for (field of fields) {
for (const field of fields) {
const [key, val] = field.split(/=(.+)/);
json[key] = val;
}
Expand All @@ -146,42 +140,14 @@ const formatLog = (log) => {
}
};

const getClient = () => {
const client = new Client({
host: PGHOST,
port: parseInt(PGPORT),
user: PGUSER,
password: PGPASSWORD,
database: PGDATABASE,
ssl: { rejectUnauthorized: false },
});
return client;
};

const clearOldLogs = async (retentionPeriodDays) => {
console.info('Removing old logs from database...');
let client;
try {
client = getClient();
await client.connect();
const query = `DELETE from sso_logs where timestamp < NOW() - INTERVAL '${retentionPeriodDays} DAYS' and namespace = '${process.env.NAMESPACE}';`;
console.info(`Running delete query: ${query}`);
await client.query(query);
} catch (e) {
console.error(e);
} finally {
await client.end();
}
};

const parseLogStats = async () => {
console.info('Collecting log stats...');
let client;
try {
client = getClient();
client = getPgClient();
await client.connect();
console.info('running save_log_types function...');
const saveStatsQuery = `SELECT save_log_types();`;
const saveStatsQuery = 'SELECT save_log_types();';
await client.query(saveStatsQuery);
} catch (e) {
console.error(e);
Expand All @@ -201,7 +167,7 @@ async function saveFilesToDatabase(dirname) {
try {
const dateToSave = getDate(SAVE_LOGS_N_DAYS_AGO);
const previousDayLogsFolder = `${dirname}/${dateToSave}`;
await clearOldLogs(RETENTION_PERIOD_DAYS);
await deleteLegacyData('sso_logs', RETENTION_PERIOD_DAYS);
await reduceDataFromFiles(previousDayLogsFolder);
await parseLogStats();
} catch (err) {
Expand Down
Loading

0 comments on commit ffe1612

Please sign in to comment.