Skip to content

Commit

Permalink
Feat(tenant): Create a logger for each tenant
Browse files Browse the repository at this point in the history
  • Loading branch information
arimet committed Sep 25, 2023
1 parent ba3fe64 commit bdc565c
Show file tree
Hide file tree
Showing 13 changed files with 76 additions and 42 deletions.
2 changes: 1 addition & 1 deletion globalConfig.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
{"mongoUri":"mongodb://127.0.0.1:36595/jest?","mongoDBName":"jest"}
{"mongoUri":"mongodb://127.0.0.1:39569/jest?","mongoDBName":"jest"}
8 changes: 5 additions & 3 deletions src/api/controller/api/dataset.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import Koa from 'koa';
import route from 'koa-route';
import logger from '../../services/logger';
import koaBodyParser from 'koa-bodyparser';
import getLogger from '../../services/logger';

const app = new Koa();

Expand All @@ -17,7 +17,8 @@ export const clearDataset = async ctx => {
}
ctx.body = { status: 'success' };
} catch (error) {
logger.error('clear dataset error', {
const logger = getLogger(ctx.tenant);
logger.error(`Clear dataset error`, {
error,
});
ctx.body = { status: 'error', error };
Expand Down Expand Up @@ -94,7 +95,8 @@ export const deleteDatasetRow = async (ctx, id) => {
await ctx.dataset.deleteOne(id);
ctx.body = { status: 'deleted' };
} catch (error) {
logger.error('delete dataset row error', {
const logger = getLogger(ctx.tenant);
logger.error(`Delete dataset row error`, {
error,
});
ctx.body = { status: 'error', error };
Expand Down
5 changes: 3 additions & 2 deletions src/api/controller/api/publish.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@ import route from 'koa-route';
import { v1 as uuid } from 'uuid';

import clearPublished from '../../services/clearPublished';
import logger from '../../services/logger';
import { workerQueues } from '../../workers';
import { PUBLISHER } from '../../workers/publisher';
import getLogger from '../../services/logger';

const app = new Koa();

Expand All @@ -28,7 +28,8 @@ export const handleClearPublished = async ctx => {
status: 'success',
};
} catch (error) {
logger.error(`handle clear published error - ${ctx.tenant}`, {
const logger = getLogger(ctx.tenant);
logger.error(`Handle clear published error`, {
error,
});
ctx.body = {
Expand Down
6 changes: 4 additions & 2 deletions src/api/controller/api/publishFacets.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ export default async (ctx, fields, withProgress = false) => {
label: 'publishing',
type: 'publisher',
});
jobLogger.info(ctx.job, `Publishing facets - ${ctx.tenant}`);

jobLogger.info(ctx.job, `Publishing facets`);

const names = fields.map(({ name }) => name);
await ctx.publishedFacet.remove({ field: { $in: names } });
Expand All @@ -42,5 +43,6 @@ export default async (ctx, fields, withProgress = false) => {
progress.throw(ctx.tenant, error);
});
withProgress && progress.finish(ctx.tenant);
jobLogger.info(ctx.job, `Facets published - ${ctx.tenant}`);

jobLogger.info(ctx.job, `Facets published`);
};
5 changes: 3 additions & 2 deletions src/api/controller/customPage.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import path from 'path';

import { getFileStatsIfExists, readFile } from '../services/fsHelpers';
import logger from '../services/logger';
import getLogger from '../services/logger';

const scriptRegEx = new RegExp('<script.*?( src=".*")?.*?>.*?</script>', 'gm');

Expand Down Expand Up @@ -46,7 +46,8 @@ export default async ctx => {
try {
html = (await readFile(pathname)).toString();
} catch (error) {
logger.error('Unable to read custom page file', {
const logger = getLogger(ctx.tenant);
logger.error(`Unable to read custom page file`, {
pathname,
error,
});
Expand Down
6 changes: 4 additions & 2 deletions src/api/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ import { KoaAdapter } from '@bull-board/koa';
import { createBullBoard } from '@bull-board/api';
import { BullAdapter } from '@bull-board/api/bullAdapter';

import logger from './services/logger';
import controller from './controller';
import testController from './controller/testController';
import indexSearchableFields from './services/indexSearchableFields';
Expand All @@ -25,6 +24,7 @@ import Meter from '@uswitch/koa-prometheus';
import MeterConfig from '@uswitch/koa-prometheus/build/koa-prometheus.defaults.json';
import tracer, { eventTrace, eventError } from '@uswitch/koa-tracer';
import access, { eventAccess } from '@uswitch/koa-access';
import getLogger from './services/logger';

// KoaQs use qs to parse query string. There is an default limit of 20 items in an array. Above this limit, qs will transform the array into an key/value object.
// We need to increase this limit to 1000 to be able to handle the facets array in the query string.
Expand Down Expand Up @@ -86,7 +86,8 @@ app.use(async (ctx, next) => {
);
ctx.job = filteredActiveJobs[0];
} catch (e) {
logger.error('An error occured on loading running job', e);
const logger = getLogger(ctx.tenant);
logger.error(`An error occured on loading running job`, e);
}
await next();
});
Expand All @@ -104,6 +105,7 @@ app.use(async (ctx, next) => {
}
await next();
ctx.httpLog.status = ctx.status;
const logger = getLogger(ctx.tenant);
logger.info(ctx.request.url, ctx.httpLog);
});

Expand Down
28 changes: 15 additions & 13 deletions src/api/services/enrichment/enrichment.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import {
import { ENRICHING, PENDING } from '../../../common/progressStatus';
import { jobLogger } from '../../workers/tools';
import { CancelWorkerError } from '../../workers';
import logger from '../logger';
import getLogger from '../logger';

const { enrichmentBatchSize: BATCH_SIZE = 10 } = localConfig;

Expand Down Expand Up @@ -96,7 +96,8 @@ export const getEnrichmentDataPreview = async ctx => {
result.push(...values.map(v => v.value));
}
} catch (error) {
logger.error('Error while processing enrichment preview', error);
const logger = getLogger(ctx.tenant);
logger.error(`Error while processing enrichment preview`, error);
return [];
}
return result;
Expand Down Expand Up @@ -193,7 +194,8 @@ const processEzsEnrichment = (entries, commands, ctx, preview = false) => {
try {
sourceChunk = JSON.parse(error.sourceChunk);
} catch (e) {
logger.error('Error while parsing sourceChunk', e);
const logger = getLogger(ctx.tenant);
logger.error(`Error while parsing sourceChunk`, e);
}
}
return values.push({
Expand All @@ -220,7 +222,7 @@ export const processEnrichment = async (enrichment, ctx) => {
{ $set: { ['status']: IN_PROGRESS } },
);
let errorCount = 0;

const room = `${ctx.tenant}-enrichment-job-${ctx.job.id}`;
const commands = createEzsRuleCommands(enrichment.rule);
const dataSetSize = await ctx.dataset.count();
Expand All @@ -239,15 +241,15 @@ export const processEnrichment = async (enrichment, ctx) => {
if (!entry.uri) {
const logData = JSON.stringify({
level: 'error',
message: `Unable to enrich row with no URI, see object _id#${entry._id}`,
message: `[Instance: ${ctx.tenant}] Unable to enrich row with no URI, see object _id#${entry._id}`,
timestamp: new Date(),
status: IN_PROGRESS,
});
jobLogger.info(ctx.job, logData);
} else {
const logData = JSON.stringify({
level: 'info',
message: `Started enriching #${entry.uri}`,
message: `[Instance: ${ctx.tenant}] Started enriching #${entry.uri}`,
timestamp: new Date(),
status: IN_PROGRESS,
});
Expand Down Expand Up @@ -281,8 +283,8 @@ export const processEnrichment = async (enrichment, ctx) => {
const logData = JSON.stringify({
level: enrichedValue.error ? 'error' : 'info',
message: enrichedValue.error
? `Error enriching #${id}: ${value}`
: `Finished enriching #${id} (output: ${value})`,
? `[Instance: ${ctx.tenant}] Error enriching #${id}: ${value}`
: `[Instance: ${ctx.tenant}] Finished enriching #${id} (output: ${value})`,
timestamp: new Date(),
status: IN_PROGRESS,
});
Expand Down Expand Up @@ -312,7 +314,7 @@ export const processEnrichment = async (enrichment, ctx) => {

const logData = JSON.stringify({
level: 'error',
message: e.message,
message: `[Instance: ${ctx.tenant}] ${e.message}`,
timestamp: new Date(),
status: IN_PROGRESS,
});
Expand All @@ -335,7 +337,7 @@ export const processEnrichment = async (enrichment, ctx) => {
progress.finish(ctx.tenant);
const logData = JSON.stringify({
level: 'ok',
message: `Enrichement finished - ${ctx.tenant}`,
message: `[Instance: ${ctx.tenant}] Enrichement finished`,
timestamp: new Date(),
status: FINISHED,
});
Expand Down Expand Up @@ -369,7 +371,7 @@ export const startEnrichment = async ctx => {
const room = `enrichment-job-${ctx.job.id}`;
const logData = JSON.stringify({
level: 'ok',
message: `Enrichement started`,
message: `[Instance: ${ctx.tenant}] Enrichement started`,
timestamp: new Date(),
status: IN_PROGRESS,
});
Expand Down Expand Up @@ -397,8 +399,8 @@ export const setEnrichmentError = async (ctx, err) => {
level: 'error',
message:
err instanceof CancelWorkerError
? `${err?.message}`
: `Enrichement errored : ${err?.message}`,
? `[Instance: ${ctx.tenant}] ${err?.message}`
: `[Instance: ${ctx.tenant}] Enrichement errored : ${err?.message}`,
timestamp: new Date(),
status: err instanceof CancelWorkerError ? CANCELED : ERROR,
});
Expand Down
3 changes: 3 additions & 0 deletions src/api/services/enrichment/enrichment.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -372,6 +372,9 @@ describe('enrichment', () => {
id: 1,
log: jest.fn(),
isActive: jest.fn().mockReturnValue(true),
data: {
tenant: 'lodex_test',
},
},
enrichment: {
updateOne: jest.fn(),
Expand Down
37 changes: 26 additions & 11 deletions src/api/services/logger.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,29 @@ import winston, { createLogger, format } from 'winston';
import config from 'config';
import { MESSAGE } from 'triple-beam';

export default createLogger({
level: process.env.NODE_ENV === 'development' ? 'debug' : 'info',
transports: [new winston.transports.Console()],
format: format.combine(
format.timestamp(),
format.colorize(),
format.simple(),
format.printf(info => `${info.timestamp} ${info[MESSAGE]}`),
),
silent: config.logger.disabled,
});
// Per-tenant logger cache: one winston logger per tenant, created lazily
// on first request and reused on every subsequent call.
const loggers = new Map();

/**
 * Return the winston logger dedicated to the given tenant, creating and
 * memoizing it on first use. Every line the logger emits is prefixed with
 * the tenant name so multi-tenant output can be told apart.
 *
 * @param {string} tenant - tenant identifier used as cache key and log prefix
 * @returns {import('winston').Logger} the tenant-scoped logger
 */
const getLogger = tenant => {
    const cached = loggers.get(tenant);
    if (cached) {
        return cached;
    }

    // 'debug' verbosity in development, 'info' otherwise; output can be
    // globally silenced through the `logger.disabled` config flag.
    const tenantLogger = createLogger({
        level: process.env.NODE_ENV === 'development' ? 'debug' : 'info',
        transports: [new winston.transports.Console()],
        format: format.combine(
            format.timestamp(),
            format.colorize(),
            format.simple(),
            format.printf(
                info => ` [${tenant}] ${info.timestamp} ${info[MESSAGE]}`,
            ),
        ),
        silent: config.logger.disabled,
    });

    loggers.set(tenant, tenantLogger);
    return tenantLogger;
};

export default getLogger;
4 changes: 2 additions & 2 deletions src/api/services/publishCharacteristics.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ export const publishCharacteristicsFactory = ({ getDocumentTransformer }) =>
return;
}

jobLogger.info(ctx.job, 'Publishing characteristics');
jobLogger.info(ctx.job, `Publishing characteristics`);
const getPublishedCharacteristics = getDocumentTransformer(
ctx.dataset.findBy,
datasetScopeFields,
Expand All @@ -34,7 +34,7 @@ export const publishCharacteristicsFactory = ({ getDocumentTransformer }) =>
publishedCharacteristics,
);
}
jobLogger.info(ctx.job, 'Characteristics published');
jobLogger.info(ctx.job, `Characteristics published`);
};

export default publishCharacteristicsFactory({ getDocumentTransformer });
7 changes: 4 additions & 3 deletions src/api/services/publishDocuments.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ import { PUBLISH_DOCUMENT } from '../../common/progressStatus';
import { URI_FIELD_NAME } from '../../common/uris';
import { SCOPE_COLLECTION, SCOPE_DOCUMENT } from '../../common/scope';
import parseValue from '../../common/tools/parseValue';
import logger from './logger';
import { jobLogger } from '../workers/tools';
import getLogger from './logger';

export const versionTransformerDecorator = (
transformDocument,
Expand Down Expand Up @@ -88,10 +88,11 @@ export const publishDocumentsFactory = ({
transformAllDocuments,
}) => async (ctx, count, fields) => {
if (!ctx.job) {
logger.error('Job is not defined');
const logger = getLogger(ctx.tenant);
logger.error(`Job is not defined`);
return;
}
jobLogger.info(ctx.job, 'Publishing documents');
jobLogger.info(ctx.job, `Publishing documents`);
const mainResourceFields = fields
.filter(
c =>
Expand Down
3 changes: 3 additions & 0 deletions src/api/services/publishDocuments.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ const fields = [
const job = {
log: jest.fn(),
isActive: () => true,
data: {
tenant: 'lodex_test',
},
};
const getCtx = ({ subresources } = {}) => ({
tenant: 'lodex_test',
Expand Down
4 changes: 3 additions & 1 deletion src/api/workers/tools.js
Original file line number Diff line number Diff line change
@@ -1,19 +1,21 @@
import { cleanWaitingJobsOfType, workerQueues } from '.';
import logger from '../services/logger';
import getLogger from '../services/logger';
import progress from '../services/progress';

export const jobLogger = {
info: (job, message) => {
if (!job) {
return;
}
const logger = getLogger(job.data.tenant);
logger.info(message);
job.log(message);
},
error: (job, message) => {
if (!job) {
return;
}
const logger = getLogger(job.data.tenant);
logger.error(message);
job.log(message);
},
Expand Down

0 comments on commit bdc565c

Please sign in to comment.