From c32119ec65061b2fb67d5bea24dac4de80d664fa Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 11 Jun 2024 14:49:41 -0700 Subject: [PATCH 01/60] first pass at ssdi backend --- src/queries/class-goal-dataset.sql | 26 ++-- src/queries/class-goal-use.sql | 30 ++-- src/queries/fake-class-goal-use.sql | 30 ++-- src/queries/fei-goals.sql | 26 ++-- src/queries/monthly-delivery-report.sql | 8 +- src/routes/ssdi/handlers.test.js | 125 +++++++++++++++++ src/routes/ssdi/handlers.ts | 77 +++++++++++ src/routes/ssdi/index.ts | 15 ++ src/services/ssdi.test.js | 171 +++++++++++++++++++++++ src/services/ssdi.ts | 174 ++++++++++++++++++++++++ 10 files changed, 628 insertions(+), 54 deletions(-) create mode 100644 src/routes/ssdi/handlers.test.js create mode 100644 src/routes/ssdi/handlers.ts create mode 100644 src/routes/ssdi/index.ts create mode 100644 src/services/ssdi.test.js create mode 100644 src/services/ssdi.ts diff --git a/src/queries/class-goal-dataset.sql b/src/queries/class-goal-dataset.sql index 816ccc5ee6..e5f6c035f0 100644 --- a/src/queries/class-goal-dataset.sql +++ b/src/queries/class-goal-dataset.sql @@ -1,19 +1,21 @@ /** -* This query collects all the goals. +* @name: Goals Report +* @description: This query collects all the goals. +* @defaultOutputName: goals_report * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. +* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2022-07-01","2023-06-30"]',TRUE); */ SELECT diff --git a/src/queries/class-goal-use.sql b/src/queries/class-goal-use.sql index e531a9b4e8..522390386f 100644 --- a/src/queries/class-goal-use.sql +++ b/src/queries/class-goal-use.sql @@ -1,21 +1,23 @@ /** -* This query collects all the Monitoring goals used on approved reports within the defined time range. +* @name: Monitoring Goals Report +* @description: This query collects all the Monitoring goals used on approved reports within the defined time range. +* @defaultOutputName: monitoring_goals_report * -* The query results are filterable by the SSDI flags. 
All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within -* - ssdi.startDate - two dates defining a range for the startDate to be within -* - ssdi.endDate - two dates defining a range for the endDate to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within +* - ssdi.startDate - date[] - two dates defining a range for the startDate to be within +* - ssdi.endDate - date[] - two dates defining a range for the endDate to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. +* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2023-10-01","2023-10-15"]',TRUE); */ SELECT diff --git a/src/queries/fake-class-goal-use.sql b/src/queries/fake-class-goal-use.sql index 16106388f2..79258d526a 100644 --- a/src/queries/fake-class-goal-use.sql +++ b/src/queries/fake-class-goal-use.sql @@ -1,21 +1,23 @@ /** -* This query collects all the fake Monitoring goals used on approved reports within the defined time range. +* @name: Fake Monitoring Goals Report +* @description: This query collects all the fake Monitoring goals used on approved reports within the defined time range. +* @defaultOutputName: fake_monitoring_goals_report * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. 
* The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within -* - ssdi.startDate - two dates defining a range for the startDate to be within -* - ssdi.endDate - two dates defining a range for the endDate to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within +* - ssdi.startDate - date[] - two dates defining a range for the startDate to be within +* - ssdi.endDate - date[] - two dates defining a range for the endDate to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. +* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2023-10-01","2023-10-15"]',TRUE); */ SELECT diff --git a/src/queries/fei-goals.sql b/src/queries/fei-goals.sql index 706376fa94..ee14ab3875 100644 --- a/src/queries/fei-goals.sql +++ b/src/queries/fei-goals.sql @@ -1,20 +1,22 @@ /** -* This query collects all the FEI and near FEI goals based on several criteria. +* @name: FEI Goals Report +* @description: This query collects all the FEI and near FEI goals based on several criteria. +* @defaultOutputName: fei_goals_report * * The query results are filterable by the SSDI flags. 
All SSDI flags are passed as an array of values * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatium recipient names -* - ssdi.grantNumbers - one or more verbatium grant numbers -* - ssdi.goals - one or more verbatium goal text -* - ssdi.status - one or more verbatium statuses -* - ssdi.createdVia - one or more verbatium created via values -* - ssdi.onApprovedAR - true or false -* - ssdi.response - one or more verbatium response values -* - ssdi.createdbetween - two dates defining a range for the createdAt to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - string[] - one or more verbatim recipient names +* - ssdi.grantNumbers - string[] - one or more verbatim grant numbers +* - ssdi.goals - string[] - one or more verbatim goal text +* - ssdi.status - string[] - one or more verbatim statuses +* - ssdi.createdVia - string[] - one or more verbatim created via values +* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.response - string[] - one or more verbatim response values +* - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * -* zero or more SSDI flags can be set within the same transaction as the query is executed. -* The following is an example of how to set a SSDI flag: +* Zero or more SSDI flags can be set within the same transaction as the query is executed. +* The following is an example of how to set an SSDI flag: * SELECT SET_CONFIG('ssdi.createdbetween','["2023-10-01","2023-10-15"]',TRUE); */ WITH bad AS ( diff --git a/src/queries/monthly-delivery-report.sql b/src/queries/monthly-delivery-report.sql index fe299b794d..43a743218c 100644 --- a/src/queries/monthly-delivery-report.sql +++ b/src/queries/monthly-delivery-report.sql @@ -1,10 +1,14 @@ /** +* @name: Delivery report +* @description: A time boxed report of services delivered by user and role +* @defaultOutputName: delivery_report +* * This query collects all the Monitoring goals used on approved reports within the defined time range. * * The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values * The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.startDate - two dates defining a range for the startDate to be within +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.startDate - date[] - two dates defining a range for the startDate to be within * * zero or more SSDI flags can be set within the same transaction as the query is executed. 
* The following is an example of how to set a SSDI flag: diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js new file mode 100644 index 0000000000..7124631818 --- /dev/null +++ b/src/routes/ssdi/handlers.test.js @@ -0,0 +1,125 @@ +const request = require('supertest'); +const express = require('express'); +const db = require('../../models'); +const { + listQueryFiles, + readFlagsAndQueryFromFile, + validateFlagValues, + setFlags, + sanitizeFilename, + generateFlagString, +} = require('../../services/ssdi'); +const { listQueries, getFlags, runQuery } = require('./handlers'); + +jest.mock('../../models'); +jest.mock('../../services/ssdi'); + +const app = express(); +app.use(express.json()); +app.get('/listQueries', listQueries); +app.get('/getFlags', getFlags); +app.post('/runQuery', runQuery); + +describe('API Endpoints', () => { + describe('listQueries', () => { + it('should list all available query files', async () => { + listQueryFiles.mockReturnValue([{ name: 'Test Query', description: 'Test Description' }]); + + const response = await request(app).get('/listQueries'); + expect(response.status).toBe(200); + expect(response.body).toEqual([{ name: 'Test Query', description: 'Test Description' }]); + }); + + it('should handle errors', async () => { + listQueryFiles.mockImplementation(() => { throw new Error('Error listing query files'); }); + + const response = await request(app).get('/listQueries'); + expect(response.status).toBe(500); + expect(response.text).toBe('Error listing query files'); + }); + }); + + describe('getFlags', () => { + it('should get flags from the script', async () => { + readFlagsAndQueryFromFile.mockReturnValue({ flags: { flag1: { type: 'integer[]', description: 'Test Flag' } } }); + + const response = await request(app).get('/getFlags').query({ path: 'test/path' }); + expect(response.status).toBe(200); + expect(response.body).toEqual({ flag1: { type: 'integer[]', description: 'Test Flag' } }); + }); + + it('should return 400 if script path is not provided', async () => { + const response = await request(app).get('/getFlags'); + expect(response.status).toBe(400); + expect(response.text).toBe('Script path is required'); + }); + + it('should handle errors', async () => { + readFlagsAndQueryFromFile.mockImplementation(() => { throw new Error('Error reading flags'); }); + + const response = await request(app).get('/getFlags').query({ path: 'test/path' }); + expect(response.status).toBe(500); + expect(response.text).toBe('Error reading flags'); + }); + }); + + describe('runQuery', () => { + it('should run the query and return JSON result', async () => { + readFlagsAndQueryFromFile.mockReturnValue({ + flags: { flag1: { type: 'integer[]', description: 'Test Flag' } }, + query: 'SELECT * FROM test', + defaultOutputName: 'test_output' + }); + validateFlagValues.mockImplementation(() => {}); + setFlags.mockResolvedValue([]); + db.sequelize.query.mockResolvedValue([[{ id: 1, name: 'Test' }]]); + sanitizeFilename.mockReturnValue('test_output_flag1_1-2-3'); + generateFlagString.mockReturnValue('flag1_1-2-3'); + + const response = await request(app) + .post('/runQuery') + .query({ path: 'test/path' }) + .send({ flag1: [1, 2, 3] }); + expect(response.status).toBe(200); + expect(response.body).toEqual([{ id: 1, name: 'Test' }]); + }); + + it('should run the query and return CSV result', async () => { + readFlagsAndQueryFromFile.mockReturnValue({ + flags: { flag1: { type: 'integer[]', description: 'Test Flag' } }, + query: 'SELECT * FROM test', + 
defaultOutputName: 'test_output' + }); + validateFlagValues.mockImplementation(() => {}); + setFlags.mockResolvedValue([]); + db.sequelize.query.mockResolvedValue([[{ id: 1, name: 'Test' }]]); + sanitizeFilename.mockReturnValue('test_output_flag1_1-2-3'); + generateFlagString.mockReturnValue('flag1_1-2-3'); + + const response = await request(app) + .post('/runQuery') + .query({ path: 'test/path', format: 'csv' }) + .send({ flag1: [1, 2, 3] }); + expect(response.status).toBe(200); + expect(response.headers['content-type']).toBe('text/csv'); + expect(response.headers['content-disposition']).toBe('attachment; filename="test_output_flag1_1-2-3.csv"'); + }); + + it('should return 400 if script path is not provided', async () => { + const response = await request(app).post('/runQuery').send({ flag1: [1, 2, 3] }); + expect(response.status).toBe(400); + expect(response.text).toBe('Script path is required'); + }); + + it('should handle errors', async () => { + readFlagsAndQueryFromFile.mockImplementation(() => { throw new Error('Error reading query'); }); + + const response = await request(app) + .post('/runQuery') + .query({ path: 'test/path' }) + .send({ flag1: [1, 2, 3] }); + expect(response.status).toBe(500); + expect(response.text).toBe('Error executing query: Error reading query'); + }); + }); +}); diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts new file mode 100644 index 0000000000..4479c8e639 --- /dev/null +++ b/src/routes/ssdi/handlers.ts @@ -0,0 +1,77 @@ +import { stringify } from 'csv-stringify/sync'; +import express, { Request, Response } from 'express'; +import db from '../../models'; // Adjust the path according to your project structure +import { + FlagValues, + listQueryFiles, + readFlagsAndQueryFromFile, + validateFlagValues, + setFlags, + sanitizeFilename, + generateFlagString, +} from '../../services/ssdi'; + +// list all available query files with name and description +const listQueries = async (req: Request, res: Response) => { + try { + const queryFiles = listQueryFiles('./queries'); + res.json(queryFiles); + } catch (error) { + res.status(500).send('Error listing query files'); + } +}; + +// Reads the flags and query from the file and sends the flags to the UI +const getFlags = async (req: Request, res: Response) => { + const scriptPath = req.query.path as string; + if (!scriptPath) { + res.status(400).send('Script path is required'); + return; + } + + try { + const { flags } = readFlagsAndQueryFromFile(scriptPath); + res.json(flags); + } catch (error) { + res.status(500).send('Error reading flags'); + } +}; + +// Reads the flags and runs the query after setting the flags +// TODO: no current restrictions on what an authenticated user can pull. 
+// Example: A user with only region 1 access can pull data for region 5 +const runQuery = async (req: Request, res: Response) => { + const scriptPath = req.query.path as string; + const outputFormat = (req.query.format as string) || 'json'; + if (!scriptPath) { + res.status(400).send('Script path is required'); + return; + } + + try { + const { flags, query, defaultOutputName } = readFlagsAndQueryFromFile(scriptPath); + const flagValues: FlagValues = req.body; + validateFlagValues(flags, flagValues); + await setFlags(flags, flagValues); + const result = await db.sequelize.query(query); + + const sanitizedOutputName = sanitizeFilename(`${defaultOutputName}_${generateFlagString(flagValues)}`); + + if (outputFormat === 'csv') { + res.setHeader('Content-Type', 'text/csv'); + res.setHeader('Content-Disposition', `attachment; filename="${sanitizedOutputName}.csv"`); + stringify(result[0], { header: true }).pipe(res); + } else { + res.json(result[0]); + } + } catch (error) { + console.log(error); + res.status(500).send(`Error executing query: ${error.message}`); + } +}; + +export { + listQueries, + getFlags, + runQuery, +}; diff --git a/src/routes/ssdi/index.ts b/src/routes/ssdi/index.ts new file mode 100644 index 0000000000..3e8f3ee347 --- /dev/null +++ b/src/routes/ssdi/index.ts @@ -0,0 +1,15 @@ +import express from 'express'; +import transactionWrapper from '../transactionWrapper'; +import authMiddleware from '../../middleware/authMiddleware'; +import { + listQueries, + getFlags, + runQuery, +} from './handlers'; + +const router = express.Router(); +router.get('/list-queries', authMiddleware, transactionWrapper(listQueries)); +router.get('/get-flags', authMiddleware, transactionWrapper(getFlags)); +router.get('/run-query', authMiddleware, transactionWrapper(runQuery)); + +export default router; diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js new file mode 100644 index 0000000000..2c53dde6e4 --- /dev/null +++ b/src/services/ssdi.test.js @@ -0,0 +1,171 @@ +import fs from 'fs'; +import path from 'path'; +import db from '../models'; +import { + listQueryFiles, + readNameAndDescriptionFromFile, + readFlagsAndQueryFromFile, + validateFlagValues, + validateType, + setFlags, + sanitizeFilename, + generateFlagString, +} from './ssdi'; + +// Mock fs and db +jest.mock('fs'); +jest.mock('../models', () => ({ + sequelize: { + query: jest.fn(), + QueryTypes: { SELECT: 'SELECT' }, + }, +})); + +describe('ssdi', () => { + describe('readNameAndDescriptionFromFile', () => { + it('should read name, description, and default output name from file', () => { + const fileContents = ` + @name: TestName + @description: Test description + @defaultOutputName: test_output + `; + fs.readFileSync.mockReturnValue(fileContents); + + const result = readNameAndDescriptionFromFile('test/path'); + expect(result).toEqual({ + name: 'TestName', + description: 'Test description', + defaultOutputName: 'test_output', + }); + }); + }); + + describe('listQueryFiles', () => { + it('should list all query files with name and description', () => { + fs.readdirSync.mockReturnValue(['file1.sql', 'file2.sql']); + fs.readFileSync.mockReturnValue(` + @name: TestName + @description: Test description + @defaultOutputName: test_output + `); + + const result = listQueryFiles('test/directory'); + expect(result).toEqual([ + { + name: 'TestName', + description: 'Test description', + filePath: 'test/directory/file1.sql', + defaultOutputName: 'test_output', + }, + { + name: 'TestName', + description: 'Test description', + filePath: 
'test/directory/file2.sql', + defaultOutputName: 'test_output', + }, + ]); + }); + }); + + describe('readFlagsAndQueryFromFile', () => { + it('should read flags and query from file', () => { + const fileContents = ` + /* + * - ssdi.flag1 - integer[] - Flag description + * @defaultOutputName: test_output + */ + SELECT * FROM table;\n`; + fs.readFileSync.mockReturnValue(fileContents); + + const result = readFlagsAndQueryFromFile('test/path'); + expect(result).toEqual({ + flags: { + flag1: { + type: 'integer[]', + description: 'Flag description', + }, + }, + query: 'SELECT * FROM table;', + defaultOutputName: 'test_output', + }); + }); + }); + + describe('validateType', () => { + it('should validate integer[] type', () => { + expect(validateType('integer[]', [1, 2, 3])).toBe(true); + expect(validateType('integer[]', [1, '2', 3])).toBe(false); + }); + + it('should validate date[] type', () => { + expect(validateType('date[]', ['2021-01-01', '2021-02-01'])).toBe(true); + expect(validateType('date[]', ['2021-01-01', 'invalid-date'])).toBe(false); + }); + + it('should validate string[] type', () => { + expect(validateType('string[]', ['hello', 'world'])).toBe(true); + expect(validateType('string[]', ['hello', 123])).toBe(false); + }); + + it('should validate boolean type', () => { + expect(validateType('boolean', true)).toBe(true); + expect(validateType('boolean', false)).toBe(true); + expect(validateType('boolean', 'true')).toBe(false); + }); + + it('should throw an error for unknown type', () => { + expect(() => validateType('unknown[]', ['test'])).toThrow('Unknown type: unknown[]'); + }); + }); + + describe('validateFlagValues', () => { + const flags = { + flag1: { type: 'integer[]', description: 'Flag description' }, + }; + + it('should validate flag values correctly', () => { + expect(() => validateFlagValues(flags, { flag1: [1, 2, 3] })).not.toThrow(); + }); + + it('should throw error for invalid flag value', () => { + expect(() => validateFlagValues(flags, { flag1: [1, '2', 3] })).toThrow( + 'Invalid type for flag flag1: expected integer[]', + ); + }); + + it('should throw error for invalid flag', () => { + expect(() => validateFlagValues(flags, { invalidFlag: [1, 2, 3] })).toThrow( + 'Invalid flag: invalidFlag', + ); + }); + }); + + describe('setFlags', () => { + it('should set flags in the database', async () => { + const flags = { flag1: { type: 'integer[]', description: 'Flag description' } }; + const flagValues = { flag1: [1, 2, 3] }; + + db.sequelize.query.mockResolvedValue([{ success: true }]); + + const result = await setFlags(flags, flagValues); + expect(result).toEqual([[{ success: true }]]); + expect(db.sequelize.query).toHaveBeenCalledWith('SELECT set_config($1, $2, false)', { + bind: ['ssdi.flag1', JSON.stringify([1, 2, 3])], + type: db.sequelize.QueryTypes.SELECT, + }); + }); + }); + + describe('sanitizeFilename', () => { + it('should sanitize the filename', () => { + expect(sanitizeFilename('test@file#name!')).toBe('test_file_name_'); + }); + }); + + describe('generateFlagString', () => { + it('should generate a string representation of the flag values', () => { + const flagValues = { flag1: [1, 2, 3], flag2: 'value' }; + expect(generateFlagString(flagValues)).toBe('flag1_1-2-3_flag2_value'); + }); + }); +}); diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts new file mode 100644 index 0000000000..d586cfaf33 --- /dev/null +++ b/src/services/ssdi.ts @@ -0,0 +1,174 @@ +import fs from 'fs'; +import path from 'path'; +import db from '../models'; + +interface Flag { + type: 
string; + description: string; +} + +interface Flags { + [key: string]: Flag; +} + +interface FlagValues { + [key: string]: any; +} + +interface QueryFile { + name: string; + description: string; + filePath: string; + defaultOutputName: string; +} + +// Helper function to read name, description, and default output name from a file +const readNameAndDescriptionFromFile = ( + filePath: string, +): { + name: string; + description: string; + defaultOutputName: string; +} => { + const fileContents = fs.readFileSync(filePath, 'utf8'); + const nameMatch = fileContents.match(/@name:\s*(.*)/); + const descriptionMatch = fileContents.match(/@description:\s*(.*)/); + const defaultOutputNameMatch = fileContents.match(/@defaultOutputName:\s*(.*)/); + return { + name: nameMatch ? nameMatch[1].trim() : 'Unknown', // TODO: use script file name as default + description: descriptionMatch ? descriptionMatch[1].trim() : 'No description available', + defaultOutputName: defaultOutputNameMatch ? defaultOutputNameMatch[1].trim() : 'output', // TODO: use script file name as default + }; +}; + +// Helper function to list all query files with name and description +const listQueryFiles = (directory: string): QueryFile[] => { + const files = fs.readdirSync(directory); + return files.map((file) => { + const filePath = path.join(directory, file); + const { name, description, defaultOutputName } = readNameAndDescriptionFromFile(filePath); + return { + name, + description, + filePath, + defaultOutputName, + }; + }); +}; + +// Helper function to read flags and the query from the file +const readFlagsAndQueryFromFile = ( + filePath: string, +): { + flags: Flags; + query: string; + defaultOutputName: string; +} => { + const fileContents = fs.readFileSync(filePath, 'utf8'); + const lines = fileContents.split('\n'); + const flags: Flags = {}; + const queryLines: string[] = []; + let inCommentBlock = false; + let defaultOutputName = 'output'; + + lines.forEach((line) => { + if (line.trim().startsWith('/*')) { + inCommentBlock = true; + } else if (line.trim().endsWith('*/')) { + inCommentBlock = false; + return; + } + + if (inCommentBlock) { + const flagMatch = line.match(/ssdi\.(\w+) - (\w+\[\]) - (.*)/); + const defaultOutputNameMatch = line.match(/@defaultOutputName:\s*(.*)/); + if (flagMatch) { + flags[flagMatch[1]] = { type: flagMatch[2], description: flagMatch[3] }; + } + if (defaultOutputNameMatch) { + defaultOutputName = defaultOutputNameMatch[1].trim(); + } + } else { + queryLines.push(line); + } + }); + + // Join the query lines and remove leading and trailing blank lines + const query = queryLines + .join('\n') + .replace(/^\s*\n|\s*\n$/g, '') + .replace(/^\s*/, ''); + + return { flags, query, defaultOutputName }; +}; + +// Function to validate the type of the flag values +const validateType = ( + expectedType: string, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + value: any, +): boolean => { + switch (expectedType) { + case 'integer[]': + return Array.isArray(value) && value.every((v) => Number.isInteger(v)); + case 'date[]': + return Array.isArray(value) && value.every((v) => !Number.isNaN(Date.parse(v))); + case 'string[]': + return Array.isArray(value) && value.every((v) => typeof v === 'string'); + case 'boolean': + return typeof value === 'boolean'; + default: + throw new Error(`Unknown type: ${expectedType}`); + } +}; + +// Helper function to validate flag values +const validateFlagValues = (flags: Flags, flagValues: FlagValues): void => { + Object.entries(flagValues).forEach(([key, value]) 
=> { + if (!flags[key]) { + throw new Error(`Invalid flag: ${key}`); + } + const expectedType = flags[key].type; + if (!validateType(expectedType, value)) { + throw new Error(`Invalid type for flag ${key}: expected ${expectedType}`); + } + }); +}; + +// Helper function to set flags in the database +const setFlags = async ( + flags: Flags, + flagValues: FlagValues, + // eslint-disable-next-line @typescript-eslint/no-explicit-any +): Promise => Promise.all(Object.entries(flagValues).map(async ( + [key, value], +) => db.sequelize.query( + 'SELECT set_config($1, $2, false)', + { + bind: [`ssdi.${key}`, JSON.stringify(value)], + type: db.sequelize.QueryTypes.SELECT, + }, +))); + +// Helper function to sanitize filenames +const sanitizeFilename = (filename: string): string => filename.replace(/[^a-zA-Z0-9-_]/g, '_'); + +// Helper function to generate a string representation of the flag values +const generateFlagString = (flagValues: FlagValues): string => Object.entries(flagValues) + .map(([key, value]) => `${key}_${Array.isArray(value) ? value.join('-') : value}`) + .join('_'); + +export { + Flag, + Flags, + FlagValues, + QueryFile, + listQueryFiles, + readNameAndDescriptionFromFile, + readFlagsAndQueryFromFile, + validateFlagValues, + validateType, + setFlags, + sanitizeFilename, + generateFlagString, +}; From b196ec21ce58765c9d12e915130e172e7f400f10 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 11 Jun 2024 15:00:06 -0700 Subject: [PATCH 02/60] lint --- src/routes/ssdi/handlers.test.js | 4 ++-- src/routes/ssdi/handlers.ts | 1 - src/services/ssdi.ts | 1 + 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 7124631818..8223a2b64f 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -68,7 +68,7 @@ describe('API Endpoints', () => { readFlagsAndQueryFromFile.mockReturnValue({ flags: { flag1: { type: 'integer[]', description: 'Test Flag' } }, query: 'SELECT * FROM test', - defaultOutputName: 'test_output' + defaultOutputName: 'test_output', }); validateFlagValues.mockImplementation(() => {}); setFlags.mockResolvedValue([]); @@ -88,7 +88,7 @@ describe('API Endpoints', () => { readFlagsAndQueryFromFile.mockReturnValue({ flags: { flag1: { type: 'integer[]', description: 'Test Flag' } }, query: 'SELECT * FROM test', - defaultOutputName: 'test_output' + defaultOutputName: 'test_output', }); validateFlagValues.mockImplementation(() => {}); setFlags.mockResolvedValue([]); diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index 4479c8e639..7ac39b7295 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -65,7 +65,6 @@ const runQuery = async (req: Request, res: Response) => { res.json(result[0]); } } catch (error) { - console.log(error); res.status(500).send(`Error executing query: ${error.message}`); } }; diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index d586cfaf33..fb342c41af 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -12,6 +12,7 @@ interface Flags { } interface FlagValues { + // eslint-disable-next-line @typescript-eslint/no-explicit-any [key: string]: any; } From 9ac37ad23a5e0b648c390b2dc7043b6da366f6a7 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 11 Jun 2024 16:07:40 -0700 Subject: [PATCH 03/60] Update handlers.ts --- src/routes/ssdi/handlers.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index 
7ac39b7295..c04ffda851 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -1,5 +1,5 @@ -import { stringify } from 'csv-stringify/sync'; -import express, { Request, Response } from 'express'; +import stringify from 'csv-stringify/lib/sync'; +import { Request, Response } from 'express'; import db from '../../models'; // Adjust the path according to your project structure import { FlagValues, From 31def3e8dfec5856c3486737c9d125c114ffb2df Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 09:38:03 -0700 Subject: [PATCH 04/60] updates --- src/routes/ssdi/handlers.test.js | 5 ++- src/routes/ssdi/handlers.ts | 3 +- src/services/ssdi.test.js | 76 ++++++++++++++++++++++++++++---- src/services/ssdi.ts | 67 ++++++++++++++++++++++------ 4 files changed, 126 insertions(+), 25 deletions(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 8223a2b64f..102e9114a9 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -8,6 +8,7 @@ const { setFlags, sanitizeFilename, generateFlagString, + executeQuery, // Updated to executeQuery } = require('../../services/ssdi'); const { listQueries, getFlags, runQuery } = require('./handlers'); @@ -72,7 +73,7 @@ describe('API Endpoints', () => { }); validateFlagValues.mockImplementation(() => {}); setFlags.mockResolvedValue([]); - db.sequelize.query.mockResolvedValue([[{ id: 1, name: 'Test' }]]); + executeQuery.mockResolvedValue([[{ id: 1, name: 'Test' }]]); sanitizeFilename.mockReturnValue('test_output_flag1_1-2-3'); generateFlagString.mockReturnValue('flag1_1-2-3'); @@ -92,7 +93,7 @@ describe('API Endpoints', () => { }); validateFlagValues.mockImplementation(() => {}); setFlags.mockResolvedValue([]); - db.sequelize.query.mockResolvedValue([[{ id: 1, name: 'Test' }]]); + executeQuery.mockResolvedValue([[{ id: 1, name: 'Test' }]]); sanitizeFilename.mockReturnValue('test_output_flag1_1-2-3'); generateFlagString.mockReturnValue('flag1_1-2-3'); diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index c04ffda851..f6ed220d67 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -9,6 +9,7 @@ import { setFlags, sanitizeFilename, generateFlagString, + executeQuery, } from '../../services/ssdi'; // list all available query files with name and description @@ -53,7 +54,7 @@ const runQuery = async (req: Request, res: Response) => { const flagValues: FlagValues = req.body; validateFlagValues(flags, flagValues); await setFlags(flags, flagValues); - const result = await db.sequelize.query(query); + const result = await executeQuery(query); const sanitizedOutputName = sanitizeFilename(`${defaultOutputName}_${generateFlagString(flagValues)}`); diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js index 2c53dde6e4..0095aad63c 100644 --- a/src/services/ssdi.test.js +++ b/src/services/ssdi.test.js @@ -10,6 +10,7 @@ import { setFlags, sanitizeFilename, generateFlagString, + executeQuery, } from './ssdi'; // Mock fs and db @@ -17,7 +18,7 @@ jest.mock('fs'); jest.mock('../models', () => ({ sequelize: { query: jest.fn(), - QueryTypes: { SELECT: 'SELECT' }, + QueryTypes: { SELECT: 'SELECT', RAW: 'RAW' }, }, })); @@ -31,13 +32,27 @@ describe('ssdi', () => { `; fs.readFileSync.mockReturnValue(fileContents); - const result = readNameAndDescriptionFromFile('test/path'); + const result = readNameAndDescriptionFromFile('test/path.sql'); expect(result).toEqual({ name: 'TestName', description: 'Test description', defaultOutputName: 
'test_output', }); }); + + it('should use filename as default for name and default output name if not provided', () => { + const fileContents = ` + @description: Test description + `; + fs.readFileSync.mockReturnValue(fileContents); + + const result = readNameAndDescriptionFromFile('test/path.sql'); + expect(result).toEqual({ + name: 'path', + description: 'Test description', + defaultOutputName: 'path', + }); + }); }); describe('listQueryFiles', () => { @@ -77,11 +92,12 @@ describe('ssdi', () => { SELECT * FROM table;\n`; fs.readFileSync.mockReturnValue(fileContents); - const result = readFlagsAndQueryFromFile('test/path'); + const result = readFlagsAndQueryFromFile('test/path.sql'); expect(result).toEqual({ flags: { flag1: { type: 'integer[]', + name: 'flag1', description: 'Flag description', }, }, @@ -107,10 +123,9 @@ describe('ssdi', () => { expect(validateType('string[]', ['hello', 123])).toBe(false); }); - it('should validate boolean type', () => { - expect(validateType('boolean', true)).toBe(true); - expect(validateType('boolean', false)).toBe(true); - expect(validateType('boolean', 'true')).toBe(false); + it('should validate boolean[] type', () => { + expect(validateType('boolean[]', [true, false])).toBe(true); + expect(validateType('boolean[]', [true, 'false'])).toBe(false); }); it('should throw an error for unknown type', () => { @@ -120,7 +135,7 @@ describe('ssdi', () => { describe('validateFlagValues', () => { const flags = { - flag1: { type: 'integer[]', description: 'Flag description' }, + flag1: { type: 'integer[]', name: 'flag1', description: 'Flag description' }, }; it('should validate flag values correctly', () => { @@ -142,7 +157,7 @@ describe('ssdi', () => { describe('setFlags', () => { it('should set flags in the database', async () => { - const flags = { flag1: { type: 'integer[]', description: 'Flag description' } }; + const flags = { flag1: { type: 'integer[]', name: 'flag1', description: 'Flag description' } }; const flagValues = { flag1: [1, 2, 3] }; db.sequelize.query.mockResolvedValue([{ success: true }]); @@ -168,4 +183,47 @@ describe('ssdi', () => { expect(generateFlagString(flagValues)).toBe('flag1_1-2-3_flag2_value'); }); }); + + describe('executeQuery', () => { + afterEach(() => { + jest.clearAllMocks(); + }); + + it('should throw an error if the query is not a string', async () => { + const invalidQuery = 123; + + await expect(executeQuery(invalidQuery as unknown as string)).rejects.toThrow('The query must be a string'); + }); + + it('should set the transaction to READ ONLY', async () => { + const mockQuery = 'SELECT * FROM users;'; + db.sequelize.query.mockResolvedValueOnce([]); + + await executeQuery(mockQuery); + + expect(db.sequelize.query).toHaveBeenCalledWith('SET TRANSACTION READ ONLY;', { type: db.sequelize.QueryTypes.RAW }); + }); + + it('should return the result of the query', async () => { + const mockQuery = 'SELECT * FROM users;'; + const mockResult = [{ id: 1, name: 'John Doe' }]; + db.sequelize.query + .mockResolvedValueOnce([]) // for SET TRANSACTION READ ONLY + .mockResolvedValueOnce(mockResult); // for the actual query + + const result = await executeQuery(mockQuery); + + expect(result).toEqual(mockResult); + }); + + it('should throw an error if the query fails', async () => { + const mockQuery = 'SELECT * FROM users;'; + const mockError = new Error('Query execution failed'); + db.sequelize.query + .mockResolvedValueOnce([]) // for SET TRANSACTION READ ONLY + .mockRejectedValueOnce(mockError); // for the actual query + + await 
expect(executeQuery(mockQuery)).rejects.toThrow(`Query failed: ${mockError.message}`); + }); + }); }); diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index fb342c41af..5d67f05504 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -1,9 +1,11 @@ import fs from 'fs'; import path from 'path'; +import { QueryTypes } from 'sequelize'; import db from '../models'; interface Flag { type: string; + name: string; description: string; } @@ -23,6 +25,16 @@ interface QueryFile { defaultOutputName: string; } +interface CachedQuery { + flags: Flags; + query: string; + defaultOutputName: string; +} + +// Caches to store the parsed query files and their metadata +const queryFileCache: Map = new Map(); +const queryDataCache: Map = new Map(); + // Helper function to read name, description, and default output name from a file const readNameAndDescriptionFromFile = ( filePath: string, @@ -35,10 +47,11 @@ const readNameAndDescriptionFromFile = ( const nameMatch = fileContents.match(/@name:\s*(.*)/); const descriptionMatch = fileContents.match(/@description:\s*(.*)/); const defaultOutputNameMatch = fileContents.match(/@defaultOutputName:\s*(.*)/); + const fileName = path.basename(filePath, path.extname(filePath)); return { - name: nameMatch ? nameMatch[1].trim() : 'Unknown', // TODO: use script file name as default + name: nameMatch ? nameMatch[1].trim() : fileName, // Use script file name as default description: descriptionMatch ? descriptionMatch[1].trim() : 'No description available', - defaultOutputName: defaultOutputNameMatch ? defaultOutputNameMatch[1].trim() : 'output', // TODO: use script file name as default + defaultOutputName: defaultOutputNameMatch ? defaultOutputNameMatch[1].trim() : fileName, // Use script file name as default }; }; @@ -47,24 +60,30 @@ const listQueryFiles = (directory: string): QueryFile[] => { const files = fs.readdirSync(directory); return files.map((file) => { const filePath = path.join(directory, file); + if (queryFileCache.has(filePath)) { + return queryFileCache.get(filePath) as QueryFile; + } const { name, description, defaultOutputName } = readNameAndDescriptionFromFile(filePath); - return { + const queryFile: QueryFile = { name, description, filePath, defaultOutputName, }; + queryFileCache.set(filePath, queryFile); + return queryFile; }); }; // Helper function to read flags and the query from the file const readFlagsAndQueryFromFile = ( filePath: string, -): { - flags: Flags; - query: string; - defaultOutputName: string; -} => { +): CachedQuery => { + // Check if the query data is already cached + if (queryDataCache.has(filePath)) { + return queryDataCache.get(filePath) as CachedQuery; + } + const fileContents = fs.readFileSync(filePath, 'utf8'); const lines = fileContents.split('\n'); const flags: Flags = {}; @@ -81,10 +100,10 @@ const readFlagsAndQueryFromFile = ( } if (inCommentBlock) { - const flagMatch = line.match(/ssdi\.(\w+) - (\w+\[\]) - (.*)/); + const flagMatch = line.match(/ - ssdi\.(\w+) - (\w+\[\]) - (.*)/); const defaultOutputNameMatch = line.match(/@defaultOutputName:\s*(.*)/); if (flagMatch) { - flags[flagMatch[1]] = { type: flagMatch[2], description: flagMatch[3] }; + flags[flagMatch[1]] = { type: flagMatch[2], name: flagMatch[1], description: flagMatch[3] }; } if (defaultOutputNameMatch) { defaultOutputName = defaultOutputNameMatch[1].trim(); @@ -100,7 +119,11 @@ const readFlagsAndQueryFromFile = ( .replace(/^\s*\n|\s*\n$/g, '') .replace(/^\s*/, ''); - return { flags, query, defaultOutputName }; + const cachedQuery: CachedQuery = { 
flags, query, defaultOutputName }; + // Cache the parsed query data + queryDataCache.set(filePath, cachedQuery); + + return cachedQuery; }; // Function to validate the type of the flag values @@ -116,8 +139,8 @@ const validateType = ( return Array.isArray(value) && value.every((v) => !Number.isNaN(Date.parse(v))); case 'string[]': return Array.isArray(value) && value.every((v) => typeof v === 'string'); - case 'boolean': - return typeof value === 'boolean'; + case 'boolean[]': + return Array.isArray(value) && value.every((v) => typeof v === 'boolean'); default: throw new Error(`Unknown type: ${expectedType}`); } @@ -159,6 +182,23 @@ const generateFlagString = (flagValues: FlagValues): string => Object.entries(fl .map(([key, value]) => `${key}_${Array.isArray(value) ? value.join('-') : value}`) .join('_'); +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const executeQuery = async (query: string): Promise => { + if (typeof query !== 'string') { + throw new Error('The query must be a string'); + } + + try { + // Set transaction to READ ONLY, this will fail the transaction if any tables are modified + await db.sequelize.query('SET TRANSACTION READ ONLY;', { type: QueryTypes.RAW }); + + const result = await db.sequelize.query(query, { type: QueryTypes.SELECT }); + return result; + } catch (error) { + throw new Error(`Query failed: ${error.message}`); + } +} + export { Flag, Flags, @@ -172,4 +212,5 @@ export { setFlags, sanitizeFilename, generateFlagString, + executeQuery, }; From 91551ad64dc336e8c55603f021b962ec56352bfe Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 09:42:48 -0700 Subject: [PATCH 05/60] all filters are arrays --- src/queries/class-goal-dataset.sql | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/queries/class-goal-dataset.sql b/src/queries/class-goal-dataset.sql index e5f6c035f0..a0af84f1da 100644 --- a/src/queries/class-goal-dataset.sql +++ b/src/queries/class-goal-dataset.sql @@ -11,7 +11,7 @@ * - ssdi.goals - string[] - one or more verbatim goal text * - ssdi.status - string[] - one or more verbatim statuses * - ssdi.createdVia - string[] - one or more verbatim created via values -* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.onApprovedAR - boolean[] - true or false * - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * * Zero or more SSDI flags can be set within the same transaction as the query is executed. 
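/**
* A minimal sketch of the flag convention described in the header comment above, using only
* flag names, example dates, and filter patterns that already appear in these query files;
* the '[1,2,3]' and '[true]' values are illustrative. Each setting is scoped to the current
* transaction because the third SET_CONFIG argument is TRUE:
*
*   SELECT SET_CONFIG('ssdi.regionIds', '[1,2,3]', TRUE);
*   SELECT SET_CONFIG('ssdi.onApprovedAR', '[true]', TRUE);
*   SELECT SET_CONFIG('ssdi.createdbetween', '["2022-07-01","2023-06-30"]', TRUE);
*   -- ...then execute the query from this file within the same transaction...
*
* Inside the query, each flag is read back with the guarded pattern used throughout, so an
* unset flag simply disables that filter:
*
*   (NULLIF(current_setting('ssdi.goals', true), '') IS NULL
*     OR g.name in (
*       SELECT value::text AS my_array
*       FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.goals', true), ''),'[]')::json) AS value
*     ))
*/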
@@ -55,7 +55,7 @@ AND FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.grantNumbers', true), ''),'[]')::json) AS value )) AND --- Filter for status if ssdi.goals is defined +-- Filter for goals if ssdi.goals is defined (NULLIF(current_setting('ssdi.goals', true), '') IS NULL OR g.name in ( SELECT value::text AS my_array @@ -78,9 +78,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR g."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for createdAt dates between two values if ssdi.createdbetween is defined From 4736fe42cb50b081c2766d5134768764db7e7165 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 09:50:08 -0700 Subject: [PATCH 06/60] all queries used in the ssdi must use only arrays for all flags to simplify the interface --- src/queries/class-goal-use.sql | 9 +++++---- src/queries/fake-class-goal-use.sql | 16 ++++++++++++---- src/queries/fei-goals.sql | 9 +++++---- 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/src/queries/class-goal-use.sql b/src/queries/class-goal-use.sql index 522390386f..2f51b193ad 100644 --- a/src/queries/class-goal-use.sql +++ b/src/queries/class-goal-use.sql @@ -11,7 +11,7 @@ * - ssdi.goals - string[] - one or more verbatim goal text * - ssdi.status - string[] - one or more verbatim statuses * - ssdi.createdVia - string[] - one or more verbatim created via values -* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.onApprovedAR - boolean[] - true or false * - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * - ssdi.startDate - date[] - two dates defining a range for the startDate to be within * - ssdi.endDate - date[] - two dates defining a range for the endDate to be within @@ -80,9 +80,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR g."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for createdAt dates between two values if ssdi.createdbetween is defined diff --git a/src/queries/fake-class-goal-use.sql b/src/queries/fake-class-goal-use.sql index 79258d526a..f46721aca5 100644 --- a/src/queries/fake-class-goal-use.sql +++ b/src/queries/fake-class-goal-use.sql @@ -11,7 +11,7 @@ * - ssdi.goals - string[] - one or more verbatim goal text * - ssdi.status - string[] - one or more verbatim statuses * - ssdi.createdVia - string[] - one or more verbatim created via values -* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.onApprovedAR - boolean[] - true or false * - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * - ssdi.startDate - date[] - two dates defining a range for the startDate to be within * - ssdi.endDate - date[] - two dates defining a range for the endDate to be within @@ -65,6 +65,13 @@ AND FROM 
json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.grantNumbers', true), ''),'[]')::json) AS value )) AND +-- Filter for goals if ssdi.goals is defined +(NULLIF(current_setting('ssdi.goals', true), '') IS NULL + OR g.name in ( + SELECT value::text AS my_array + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.goals', true), ''),'[]')::json) AS value + )) +AND -- Filter for status if ssdi.status is defined (NULLIF(current_setting('ssdi.status', true), '') IS NULL OR g.status in ( @@ -81,9 +88,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR g."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for createdAt dates between two values if ssdi.createdbetween is defined diff --git a/src/queries/fei-goals.sql b/src/queries/fei-goals.sql index ee14ab3875..ef68fdad8d 100644 --- a/src/queries/fei-goals.sql +++ b/src/queries/fei-goals.sql @@ -11,7 +11,7 @@ * - ssdi.goals - string[] - one or more verbatim goal text * - ssdi.status - string[] - one or more verbatim statuses * - ssdi.createdVia - string[] - one or more verbatim created via values -* - ssdi.onApprovedAR - boolean - true or false +* - ssdi.onApprovedAR - boolean[] - true or false * - ssdi.response - string[] - one or more verbatim response values * - ssdi.createdbetween - date[] - two dates defining a range for the createdAt to be within * @@ -106,9 +106,10 @@ AND AND -- Filter for onApprovedAR if ssdi.onApprovedAR is defined (NULLIF(current_setting('ssdi.onApprovedAR', true), '') IS NULL - OR b."onApprovedAR" in ( - SELECT value::BOOLEAN AS my_array - FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + OR EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(NULLIF(current_setting('ssdi.onApprovedAR', true), ''),'[]')::json) AS value + WHERE value::boolean = true )) AND -- Filter for response if ssdi.response is defined From 428e0dae733b95b5509f56b22dbdd24d83b38f2f Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 10:42:26 -0700 Subject: [PATCH 07/60] adding regional restrictions --- src/policies/generic.test.js | 55 +++++++++++++++++++++++++ src/policies/generic.ts | 48 ++++++++++++++++++++++ src/routes/ssdi/handlers.test.js | 69 ++++++++++++++++++++++---------- src/routes/ssdi/handlers.ts | 24 ++++++++++- 4 files changed, 173 insertions(+), 23 deletions(-) create mode 100644 src/policies/generic.test.js create mode 100644 src/policies/generic.ts diff --git a/src/policies/generic.test.js b/src/policies/generic.test.js new file mode 100644 index 0000000000..f67c834710 --- /dev/null +++ b/src/policies/generic.test.js @@ -0,0 +1,55 @@ +import Generic from '../path/to/Generic'; +import SCOPES from '../middleware/scopeConstants'; + +describe('Generic', () => { + const user = { + permissions: [ + { scopeId: SCOPES.READ_REPORTS, regionId: 1 }, + { scopeId: SCOPES.APPROVE_REPORTS, regionId: 2 }, + { scopeId: SCOPES.READ_WRITE_REPORTS, regionId: 3 }, + ], + }; + + test('should correctly initialize with user', () => { + const generic = new Generic(user); + expect(generic.user).toBe(user); + }); + + test('should return true if user 
can access a region', () => { + const generic = new Generic(user); + expect(generic.canAccessRegion(1)).toBe(true); + expect(generic.canAccessRegion(2)).toBe(true); + expect(generic.canAccessRegion(3)).toBe(true); + }); + + test('should return false if user cannot access a region', () => { + const generic = new Generic(user); + expect(generic.canAccessRegion(4)).toBe(false); + }); + + test('should filter regions based on user permissions', () => { + const generic = new Generic(user); + const regionList = [1, 2, 3, 4]; + const filteredRegions = generic.filterRegions(regionList); + expect(filteredRegions).toEqual([1, 2, 3]); + }); + + test('should return all accessible regions if region list is empty', () => { + const generic = new Generic(user); + const filteredRegions = generic.filterRegions([]); + expect(filteredRegions).toEqual([1, 2, 3]); + }); + + test('should return an empty array if user has no permissions', () => { + const emptyUser = { permissions: [] }; + const generic = new Generic(emptyUser); + const filteredRegions = generic.filterRegions([]); + expect(filteredRegions).toEqual([]); + }); + + test('should return all accessible regions', () => { + const generic = new Generic(user); + const accessibleRegions = generic.getAllAccessibleRegions(); + expect(accessibleRegions).toEqual([1, 2, 3]); + }); +}); diff --git a/src/policies/generic.ts b/src/policies/generic.ts new file mode 100644 index 0000000000..23c069098a --- /dev/null +++ b/src/policies/generic.ts @@ -0,0 +1,48 @@ +import SCOPES from '../middleware/scopeConstants'; + +interface Permission { + scopeId: number; + regionId: number; +} + +interface User { + permissions: Permission[]; + roles?: { name: string }[]; +} + +export default class Generic { + user: User; + + constructor(user: User) { + this.user = user; + } + + canAccessRegion(region: number): boolean { + // Check if the user has any scopeId for the given region + return this.user.permissions.some((permission) => ( + Object.values(SCOPES).includes(permission.scopeId) + && permission.regionId === region + )); + } + + filterRegions(regionList: number[]): number[] { + // If the list passed is empty, return all regions the user has rights for + if (regionList.length === 0) { + return this.getAllAccessibleRegions(); + } + + // Return the list with only the regions the user has a right for + return regionList.filter((region) => this.canAccessRegion(region)); + } + + getAllAccessibleRegions(): number[] { + // Return all regions the user has rights for + const accessibleRegions = new Set(); + this.user.permissions.forEach((permission) => { + if (Object.values(SCOPES).includes(permission.scopeId)) { + accessibleRegions.add(permission.regionId); + } + }); + return Array.from(accessibleRegions); + } +} diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 102e9114a9..aba327abd2 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -8,12 +8,16 @@ const { setFlags, sanitizeFilename, generateFlagString, - executeQuery, // Updated to executeQuery + executeQuery, + currentUserId, + userById } = require('../../services/ssdi'); const { listQueries, getFlags, runQuery } = require('./handlers'); +const Generic = require('../../middleware/Generic'); jest.mock('../../models'); jest.mock('../../services/ssdi'); +jest.mock('../../middleware/Generic'); const app = express(); app.use(express.json()); @@ -65,60 +69,83 @@ describe('API Endpoints', () => { }); describe('runQuery', () => { - it('should run the query and return 
JSON result', async () => { + const user = { + id: 1, + permissions: [ + { scopeId: 1, regionId: 1 }, + { scopeId: 2, regionId: 2 }, + { scopeId: 3, regionId: 3 } + ] + }; + + beforeEach(() => { readFlagsAndQueryFromFile.mockReturnValue({ - flags: { flag1: { type: 'integer[]', description: 'Test Flag' } }, + flags: { recipientIds: { type: 'integer[]', description: 'Test Flag' } }, query: 'SELECT * FROM test', defaultOutputName: 'test_output', }); validateFlagValues.mockImplementation(() => {}); setFlags.mockResolvedValue([]); executeQuery.mockResolvedValue([[{ id: 1, name: 'Test' }]]); - sanitizeFilename.mockReturnValue('test_output_flag1_1-2-3'); - generateFlagString.mockReturnValue('flag1_1-2-3'); + sanitizeFilename.mockReturnValue('test_output_recipientIds_1-2-3'); + generateFlagString.mockReturnValue('recipientIds_1-2-3'); + currentUserId.mockResolvedValue(user.id); + userById.mockResolvedValue(user); + Generic.mockImplementation(() => ({ + filterRegions: jest.fn((ids) => ids.filter(id => id <= 3)), + getAllAccessibleRegions: jest.fn(() => [1, 2, 3]) + })); + }); + it('should run the query and return JSON result', async () => { const response = await request(app) .post('/runQuery') .query({ path: 'test/path' }) - .send({ flag1: [1, 2, 3] }); + .send({ recipientIds: [1, 2, 3, 4] }); + expect(response.status).toBe(200); expect(response.body).toEqual([{ id: 1, name: 'Test' }]); }); it('should run the query and return CSV result', async () => { - readFlagsAndQueryFromFile.mockReturnValue({ - flags: { flag1: { type: 'integer[]', description: 'Test Flag' } }, - query: 'SELECT * FROM test', - defaultOutputName: 'test_output', - }); - validateFlagValues.mockImplementation(() => {}); - setFlags.mockResolvedValue([]); - executeQuery.mockResolvedValue([[{ id: 1, name: 'Test' }]]); - sanitizeFilename.mockReturnValue('test_output_flag1_1-2-3'); - generateFlagString.mockReturnValue('flag1_1-2-3'); - const response = await request(app) .post('/runQuery') .query({ path: 'test/path', format: 'csv' }) - .send({ flag1: [1, 2, 3] }); + .send({ recipientIds: [1, 2, 3, 4] }); + expect(response.status).toBe(200); expect(response.headers['content-type']).toBe('text/csv'); - expect(response.headers['content-disposition']).toBe('attachment; filename="test_output_flag1_1-2-3.csv"'); + expect(response.headers['content-disposition']).toBe('attachment; filename="test_output_recipientIds_1-2-3.csv"'); }); it('should return 400 if script path is not provided', async () => { - const response = await request(app).post('/runQuery').send({ flag1: [1, 2, 3] }); + const response = await request(app).post('/runQuery').send({ recipientIds: [1, 2, 3] }); expect(response.status).toBe(400); expect(response.text).toBe('Script path is required'); }); + it('should return 401 if recipientIds is an empty set', async () => { + Generic.mockImplementation(() => ({ + filterRegions: jest.fn(() => []), + getAllAccessibleRegions: jest.fn(() => []) + })); + + const response = await request(app) + .post('/runQuery') + .query({ path: 'test/path' }) + .send({}); + + expect(response.status).toBe(401); + }); + it('should handle errors', async () => { readFlagsAndQueryFromFile.mockImplementation(() => { throw new Error('Error reading query'); }); const response = await request(app) .post('/runQuery') .query({ path: 'test/path' }) - .send({ flag1: [1, 2, 3] }); + .send({ recipientIds: [1, 2, 3] }); + expect(response.status).toBe(500); expect(response.text).toBe('Error executing query: Error reading query'); }); diff --git 
a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index f6ed220d67..9046b4d195 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -11,6 +11,7 @@ import { generateFlagString, executeQuery, } from '../../services/ssdi'; +import Generic from '../../policies/generic'; // list all available query files with name and description const listQueries = async (req: Request, res: Response) => { @@ -39,8 +40,6 @@ const getFlags = async (req: Request, res: Response) => { }; // Reads the flags and runs the query after setting the flags -// TODO: no current restrictions on what an authenticated user can pull. -// Example: A user with only region 1 access can pull data for region 5 const runQuery = async (req: Request, res: Response) => { const scriptPath = req.query.path as string; const outputFormat = (req.query.format as string) || 'json'; @@ -52,6 +51,26 @@ const runQuery = async (req: Request, res: Response) => { try { const { flags, query, defaultOutputName } = readFlagsAndQueryFromFile(scriptPath); const flagValues: FlagValues = req.body; + + const userId = await currentUserId(req, res); + const user = await userById(userId); + const policy = new Generic(user); + + // Check if flagValues contains recipientIds, filter the values with policy.filterRegions passing in the recipientIds. + // If flagValues does not contain recipientIds, use policy.getAllAccessibleRegions to define it. + // Before calling validateFlagValues, recipientIds must be defined and not be an empty set + + if (flagValues.recipientIds) { + flagValues.recipientIds = policy.filterRegions(flagValues.recipientIds); + } else { + flagValues.recipientIds = policy.getAllAccessibleRegions(); + } + + if (!flagValues.recipientIds || flagValues.recipientIds.length === 0) { + res.sendStatus(401); + return; + } + validateFlagValues(flags, flagValues); await setFlags(flags, flagValues); const result = await executeQuery(query); @@ -70,6 +89,7 @@ const runQuery = async (req: Request, res: Response) => { } }; + export { listQueries, getFlags, From 36076ad0cdbc014a6f59415ba5d86b6866267e4b Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 10:51:05 -0700 Subject: [PATCH 08/60] lint --- src/policies/generic.test.js | 2 +- src/routes/ssdi/handlers.test.js | 16 ++++++++-------- src/routes/ssdi/handlers.ts | 8 ++++---- src/services/ssdi.test.js | 20 ++++++++++---------- src/services/ssdi.ts | 8 ++++---- 5 files changed, 27 insertions(+), 27 deletions(-) diff --git a/src/policies/generic.test.js b/src/policies/generic.test.js index f67c834710..2484353216 100644 --- a/src/policies/generic.test.js +++ b/src/policies/generic.test.js @@ -1,4 +1,4 @@ -import Generic from '../path/to/Generic'; +import Generic from './generic'; import SCOPES from '../middleware/scopeConstants'; describe('Generic', () => { diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index aba327abd2..985122c7a7 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -10,14 +10,14 @@ const { generateFlagString, executeQuery, currentUserId, - userById + userById, } = require('../../services/ssdi'); const { listQueries, getFlags, runQuery } = require('./handlers'); -const Generic = require('../../middleware/Generic'); +const Generic = require('../../policies/generic'); jest.mock('../../models'); jest.mock('../../services/ssdi'); -jest.mock('../../middleware/Generic'); +jest.mock('../../policies/generic'); const app = express(); app.use(express.json()); @@ -74,8 +74,8 @@ 
describe('API Endpoints', () => { permissions: [ { scopeId: 1, regionId: 1 }, { scopeId: 2, regionId: 2 }, - { scopeId: 3, regionId: 3 } - ] + { scopeId: 3, regionId: 3 }, + ], }; beforeEach(() => { @@ -92,8 +92,8 @@ describe('API Endpoints', () => { currentUserId.mockResolvedValue(user.id); userById.mockResolvedValue(user); Generic.mockImplementation(() => ({ - filterRegions: jest.fn((ids) => ids.filter(id => id <= 3)), - getAllAccessibleRegions: jest.fn(() => [1, 2, 3]) + filterRegions: jest.fn((ids) => ids.filter((id) => id <= 3)), + getAllAccessibleRegions: jest.fn(() => [1, 2, 3]), })); }); @@ -127,7 +127,7 @@ describe('API Endpoints', () => { it('should return 401 if recipientIds is an empty set', async () => { Generic.mockImplementation(() => ({ filterRegions: jest.fn(() => []), - getAllAccessibleRegions: jest.fn(() => []) + getAllAccessibleRegions: jest.fn(() => []), })); const response = await request(app) diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index 9046b4d195..2b5be91d60 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -56,9 +56,10 @@ const runQuery = async (req: Request, res: Response) => { const user = await userById(userId); const policy = new Generic(user); - // Check if flagValues contains recipientIds, filter the values with policy.filterRegions passing in the recipientIds. - // If flagValues does not contain recipientIds, use policy.getAllAccessibleRegions to define it. - // Before calling validateFlagValues, recipientIds must be defined and not be an empty set + // Check if flagValues contains recipientIds, filter the values with policy.filterRegions + // passing in the recipientIds. If flagValues does not contain recipientIds, use + // policy.getAllAccessibleRegions to define it. 
Before calling validateFlagValues, + // recipientIds must be defined and not be an empty set if (flagValues.recipientIds) { flagValues.recipientIds = policy.filterRegions(flagValues.recipientIds); @@ -89,7 +90,6 @@ const runQuery = async (req: Request, res: Response) => { } }; - export { listQueries, getFlags, diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js index 0095aad63c..121ea6efa4 100644 --- a/src/services/ssdi.test.js +++ b/src/services/ssdi.test.js @@ -188,41 +188,41 @@ describe('ssdi', () => { afterEach(() => { jest.clearAllMocks(); }); - + it('should throw an error if the query is not a string', async () => { const invalidQuery = 123; - + await expect(executeQuery(invalidQuery as unknown as string)).rejects.toThrow('The query must be a string'); }); - + it('should set the transaction to READ ONLY', async () => { const mockQuery = 'SELECT * FROM users;'; db.sequelize.query.mockResolvedValueOnce([]); - + await executeQuery(mockQuery); - + expect(db.sequelize.query).toHaveBeenCalledWith('SET TRANSACTION READ ONLY;', { type: db.sequelize.QueryTypes.RAW }); }); - + it('should return the result of the query', async () => { const mockQuery = 'SELECT * FROM users;'; const mockResult = [{ id: 1, name: 'John Doe' }]; db.sequelize.query .mockResolvedValueOnce([]) // for SET TRANSACTION READ ONLY .mockResolvedValueOnce(mockResult); // for the actual query - + const result = await executeQuery(mockQuery); - + expect(result).toEqual(mockResult); }); - + it('should throw an error if the query fails', async () => { const mockQuery = 'SELECT * FROM users;'; const mockError = new Error('Query execution failed'); db.sequelize.query .mockResolvedValueOnce([]) // for SET TRANSACTION READ ONLY .mockRejectedValueOnce(mockError); // for the actual query - + await expect(executeQuery(mockQuery)).rejects.toThrow(`Query failed: ${mockError.message}`); }); }); diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index 5d67f05504..de71274ef6 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -49,9 +49,9 @@ const readNameAndDescriptionFromFile = ( const defaultOutputNameMatch = fileContents.match(/@defaultOutputName:\s*(.*)/); const fileName = path.basename(filePath, path.extname(filePath)); return { - name: nameMatch ? nameMatch[1].trim() : fileName, // Use script file name as default + name: nameMatch ? nameMatch[1].trim() : fileName, description: descriptionMatch ? descriptionMatch[1].trim() : 'No description available', - defaultOutputName: defaultOutputNameMatch ? defaultOutputNameMatch[1].trim() : fileName, // Use script file name as default + defaultOutputName: defaultOutputNameMatch ? 
defaultOutputNameMatch[1].trim() : fileName, }; }; @@ -191,13 +191,13 @@ const executeQuery = async (query: string): Promise => { try { // Set transaction to READ ONLY, this will fail the transaction if any tables are modified await db.sequelize.query('SET TRANSACTION READ ONLY;', { type: QueryTypes.RAW }); - + const result = await db.sequelize.query(query, { type: QueryTypes.SELECT }); return result; } catch (error) { throw new Error(`Query failed: ${error.message}`); } -} +}; export { Flag, From 361d4e9a369801f0e344953a166eb73bd35e667e Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 12:43:30 -0700 Subject: [PATCH 09/60] Update handlers.test.js --- src/routes/ssdi/handlers.test.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 985122c7a7..1a02a31552 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -1,6 +1,5 @@ const request = require('supertest'); const express = require('express'); -const db = require('../../models'); const { listQueryFiles, readFlagsAndQueryFromFile, From 485752f83f35a6817b98f189b3806dd4049a1844 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 13 Jun 2024 12:59:06 -0700 Subject: [PATCH 10/60] Update handlers.test.js --- src/routes/ssdi/handlers.test.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 1a02a31552..0c675aaad4 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -14,7 +14,6 @@ const { const { listQueries, getFlags, runQuery } = require('./handlers'); const Generic = require('../../policies/generic'); -jest.mock('../../models'); jest.mock('../../services/ssdi'); jest.mock('../../policies/generic'); @@ -88,8 +87,8 @@ describe('API Endpoints', () => { executeQuery.mockResolvedValue([[{ id: 1, name: 'Test' }]]); sanitizeFilename.mockReturnValue('test_output_recipientIds_1-2-3'); generateFlagString.mockReturnValue('recipientIds_1-2-3'); - currentUserId.mockResolvedValue(user.id); - userById.mockResolvedValue(user); + currentUserId.mockResolvedValue(Promise.resolve(user.id)); + userById.mockResolvedValue(Promise.resolve(user)); Generic.mockImplementation(() => ({ filterRegions: jest.fn((ids) => ids.filter((id) => id <= 3)), getAllAccessibleRegions: jest.fn(() => [1, 2, 3]), From 1ba1986fbbfe7ba5e110664e513615e2e87c2257 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 18 Jun 2024 11:52:03 -0700 Subject: [PATCH 11/60] updates to functional --- src/routes/apiDirectory.js | 2 ++ src/routes/ssdi/handlers.test.js | 38 ++++++++++++++++++----- src/routes/ssdi/handlers.ts | 52 +++++++++++++++++++++++--------- src/services/ssdi.test.js | 8 +++-- src/services/ssdi.ts | 1 + 5 files changed, 77 insertions(+), 24 deletions(-) diff --git a/src/routes/apiDirectory.js b/src/routes/apiDirectory.js index 9b55c75691..48cfd29008 100644 --- a/src/routes/apiDirectory.js +++ b/src/routes/apiDirectory.js @@ -34,6 +34,7 @@ import monitoringRouter from './monitoring'; import coursesRouter from './courses'; import { currentUserId } from '../services/currentUser'; import objectiveRouter from './objectives'; +import ssdiRouter from './ssdi'; export const loginPath = '/login'; @@ -82,6 +83,7 @@ router.use('/national-center', nationalCenterRouter); router.use('/communication-logs', communicationLogRouter); router.use('/monitoring', monitoringRouter); router.use('/courses', coursesRouter); +router.use('/ssdi', ssdiRouter); 
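With ssdiRouter mounted here, the new endpoints sit under the API root at /ssdi. A minimal client sketch follows; the /api prefix and the getFlags/runQuery sub-paths are assumptions mirroring the test app in handlers.test.js rather than confirmed routes in src/routes/ssdi/index.ts, and the script path and flag values are illustrative only.

    // Hypothetical client usage of the SSDI endpoints; paths and values are assumptions.
    const base = '/api/ssdi';
    const scriptPath = 'src/queries/communication-logs.sql';

    async function fetchCommunicationLogReport() {
      // Discover which ssdi.* flags the script declares in its header comment
      const flags = await fetch(`${base}/getFlags?path=${encodeURIComponent(scriptPath)}`)
        .then((res) => res.json());
      console.log(Object.keys(flags)); // e.g. ['regionIds', 'recipients', 'communicationDate', ...]

      // Run the query; regionIds the caller cannot access are filtered out server-side,
      // and an empty accessible set yields a 401.
      const rows = await fetch(`${base}/runQuery?path=${encodeURIComponent(scriptPath)}&format=json`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ regionIds: [1, 2], communicationDate: ['2023-10-01', '2023-12-31'] }),
      }).then((res) => res.json());
      return rows;
    }

Requesting format=csv instead returns an attachment whose filename is built from the script's @defaultOutputName plus the sanitized flag string.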
const getUser = async (req, res) => { const userId = await currentUserId(req, res); diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 0c675aaad4..db40cd9c80 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -46,7 +46,7 @@ describe('API Endpoints', () => { it('should get flags from the script', async () => { readFlagsAndQueryFromFile.mockReturnValue({ flags: { flag1: { type: 'integer[]', description: 'Test Flag' } } }); - const response = await request(app).get('/getFlags').query({ path: 'test/path' }); + const response = await request(app).get('/getFlags').query({ path: 'src/queries/test/path' }); expect(response.status).toBe(200); expect(response.body).toEqual({ flag1: { type: 'integer[]', description: 'Test Flag' } }); }); @@ -57,10 +57,22 @@ describe('API Endpoints', () => { expect(response.text).toBe('Script path is required'); }); + it('should return 400 for path traversal attempts', async () => { + const response = await request(app).get('/getFlags').query({ path: '../outside/path' }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: Path traversal detected' }); + }); + + it('should return 400 if script path is not within allowed directory', async () => { + const response = await request(app).get('/getFlags').query({ path: 'some/other/path' }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + }); + it('should handle errors', async () => { readFlagsAndQueryFromFile.mockImplementation(() => { throw new Error('Error reading flags'); }); - const response = await request(app).get('/getFlags').query({ path: 'test/path' }); + const response = await request(app).get('/getFlags').query({ path: 'src/queries/test/path' }); expect(response.status).toBe(500); expect(response.text).toBe('Error reading flags'); }); @@ -98,7 +110,7 @@ describe('API Endpoints', () => { it('should run the query and return JSON result', async () => { const response = await request(app) .post('/runQuery') - .query({ path: 'test/path' }) + .query({ path: 'src/queries/test/path' }) .send({ recipientIds: [1, 2, 3, 4] }); expect(response.status).toBe(200); @@ -108,7 +120,7 @@ describe('API Endpoints', () => { it('should run the query and return CSV result', async () => { const response = await request(app) .post('/runQuery') - .query({ path: 'test/path', format: 'csv' }) + .query({ path: 'src/queries/test/path', format: 'csv' }) .send({ recipientIds: [1, 2, 3, 4] }); expect(response.status).toBe(200); @@ -122,7 +134,19 @@ describe('API Endpoints', () => { expect(response.text).toBe('Script path is required'); }); - it('should return 401 if recipientIds is an empty set', async () => { + it('should return 400 for path traversal attempts', async () => { + const response = await request(app).post('/runQuery').query({ path: '../outside/path' }).send({ recipientIds: [1, 2, 3] }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: Path traversal detected' }); + }); + + it('should return 400 if script path is not within allowed directory', async () => { + const response = await request(app).post('/runQuery').query({ path: 'some/other/path' }).send({ recipientIds: [1, 2, 3] }); + expect(response.status).toBe(400); + expect(response.body).toEqual({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + }); + + it('should return 401 
if regionIds is an empty set', async () => { Generic.mockImplementation(() => ({ filterRegions: jest.fn(() => []), getAllAccessibleRegions: jest.fn(() => []), @@ -130,7 +154,7 @@ describe('API Endpoints', () => { const response = await request(app) .post('/runQuery') - .query({ path: 'test/path' }) + .query({ path: 'src/queries/test/path' }) .send({}); expect(response.status).toBe(401); @@ -141,7 +165,7 @@ describe('API Endpoints', () => { const response = await request(app) .post('/runQuery') - .query({ path: 'test/path' }) + .query({ path: 'src/queries/test/path' }) .send({ recipientIds: [1, 2, 3] }); expect(response.status).toBe(500); diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index 2b5be91d60..70ed1ccf65 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -1,6 +1,7 @@ import stringify from 'csv-stringify/lib/sync'; import { Request, Response } from 'express'; -import db from '../../models'; // Adjust the path according to your project structure +import { currentUserId } from '../../services/currentUser'; +import { userById } from '../../services/users'; import { FlagValues, listQueryFiles, @@ -16,7 +17,7 @@ import Generic from '../../policies/generic'; // list all available query files with name and description const listQueries = async (req: Request, res: Response) => { try { - const queryFiles = listQueryFiles('./queries'); + const queryFiles = listQueryFiles('./src/queries/'); res.json(queryFiles); } catch (error) { res.status(500).send('Error listing query files'); @@ -30,15 +31,30 @@ const getFlags = async (req: Request, res: Response) => { res.status(400).send('Script path is required'); return; } + if (scriptPath.includes('../')) { + res.status(400).json({ error: 'Invalid script path: Path traversal detected' }); + return; + } + if (!scriptPath.startsWith('src/queries/')) { + res.status(400).json({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + return; + } try { - const { flags } = readFlagsAndQueryFromFile(scriptPath); + const { flags } = readFlagsAndQueryFromFile(`./${scriptPath}`); res.json(flags); } catch (error) { res.status(500).send('Error reading flags'); } }; +const filterAttributes = ( + obj: T, + keysToRemove: (keyof T)[], +): Partial => Object.fromEntries( + Object.entries(obj).filter(([key]) => !keysToRemove.includes(key as keyof T)), + ) as Partial; + // Reads the flags and runs the query after setting the flags const runQuery = async (req: Request, res: Response) => { const scriptPath = req.query.path as string; @@ -50,24 +66,30 @@ const runQuery = async (req: Request, res: Response) => { try { const { flags, query, defaultOutputName } = readFlagsAndQueryFromFile(scriptPath); - const flagValues: FlagValues = req.body; - + const flagValues: FlagValues = filterAttributes( + { + ...req.body, + ...req.query, + }, + ['path', 'format'], + ); const userId = await currentUserId(req, res); const user = await userById(userId); const policy = new Generic(user); - // Check if flagValues contains recipientIds, filter the values with policy.filterRegions - // passing in the recipientIds. If flagValues does not contain recipientIds, use + // Check if flagValues contains regionIds, filter the values with policy.filterRegions + // passing in the regionIds. If flagValues does not contain regionIds, use // policy.getAllAccessibleRegions to define it. 
Before calling validateFlagValues, - // recipientIds must be defined and not be an empty set + // regionIds must be defined and not be an empty set - if (flagValues.recipientIds) { - flagValues.recipientIds = policy.filterRegions(flagValues.recipientIds); + if (flagValues.regionIds) { + flagValues.regionIds = flagValues.regionIds.map(Number).filter((num) => !Number.isNaN(num)); + flagValues.regionIds = policy.filterRegions(flagValues.regionIds); } else { - flagValues.recipientIds = policy.getAllAccessibleRegions(); + flagValues.regionIds = policy.getAllAccessibleRegions(); } - if (!flagValues.recipientIds || flagValues.recipientIds.length === 0) { + if (!flagValues.regionIds || flagValues.regionIds.length === 0) { res.sendStatus(401); return; } @@ -81,9 +103,11 @@ const runQuery = async (req: Request, res: Response) => { if (outputFormat === 'csv') { res.setHeader('Content-Type', 'text/csv'); res.setHeader('Content-Disposition', `attachment; filename="${sanitizedOutputName}.csv"`); - stringify(result[0], { header: true }).pipe(res); + const csvData = stringify(result, { header: true }); + res.attachment(`${sanitizedOutputName}.csv`); + res.send(`\ufeff${csvData}`); } else { - res.json(result[0]); + res.json(result); } } catch (error) { res.status(500).send(`Error executing query: ${error.message}`); diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js index 121ea6efa4..cef69861a6 100644 --- a/src/services/ssdi.test.js +++ b/src/services/ssdi.test.js @@ -83,13 +83,15 @@ describe('ssdi', () => { }); describe('readFlagsAndQueryFromFile', () => { - it('should read flags and query from file', () => { + it('should read flags and query from file and remove comments', () => { const fileContents = ` /* * - ssdi.flag1 - integer[] - Flag description * @defaultOutputName: test_output */ - SELECT * FROM table;\n`; + SELECT * FROM table; -- comment + SELECT * FROM another_table; -- another comment + `; fs.readFileSync.mockReturnValue(fileContents); const result = readFlagsAndQueryFromFile('test/path.sql'); @@ -101,7 +103,7 @@ describe('ssdi', () => { description: 'Flag description', }, }, - query: 'SELECT * FROM table;', + query: 'SELECT * FROM table;\nSELECT * FROM another_table;', defaultOutputName: 'test_output', }); }); diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index de71274ef6..b871ba759b 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -115,6 +115,7 @@ const readFlagsAndQueryFromFile = ( // Join the query lines and remove leading and trailing blank lines const query = queryLines + .map((line) => line.replace(/--.*$/, '')) .join('\n') .replace(/^\s*\n|\s*\n$/g, '') .replace(/^\s*/, ''); From 2761745acbf1d2fed6590891719398b9524a472c Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 20 Jun 2024 10:48:03 -0700 Subject: [PATCH 12/60] Update handlers.test.js --- src/routes/ssdi/handlers.test.js | 43 ++++++++++++++++++++++---------- 1 file changed, 30 insertions(+), 13 deletions(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index db40cd9c80..9bdc24d937 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -1,6 +1,6 @@ -const request = require('supertest'); -const express = require('express'); -const { +import request from 'supertest'; +import express from 'express'; +import { listQueryFiles, readFlagsAndQueryFromFile, validateFlagValues, @@ -8,13 +8,30 @@ const { sanitizeFilename, generateFlagString, executeQuery, - currentUserId, - userById, -} = require('../../services/ssdi'); -const 
{ listQueries, getFlags, runQuery } = require('./handlers'); -const Generic = require('../../policies/generic'); +} from '../../services/ssdi'; +import { currentUserId } from '../../services/currentUser'; +import { userById } from '../../services/users'; +import { listQueries, getFlags, runQuery } from './handlers'; +import Generic from '../../policies/generic'; + +jest.mock('../../services/ssdi', () => ({ + listQueryFiles: jest.fn(), + readFlagsAndQueryFromFile: jest.fn(), + validateFlagValues: jest.fn(), + setFlags: jest.fn(), + sanitizeFilename: jest.fn(), + generateFlagString: jest.fn(), + executeQuery: jest.fn(), +})); + +jest.mock('../../services/currentUser', () => ({ + currentUserId: jest.fn(), +})); + +jest.mock('../../services/users', () => ({ + userById: jest.fn(), +})); -jest.mock('../../services/ssdi'); jest.mock('../../policies/generic'); const app = express(); @@ -96,11 +113,11 @@ describe('API Endpoints', () => { }); validateFlagValues.mockImplementation(() => {}); setFlags.mockResolvedValue([]); - executeQuery.mockResolvedValue([[{ id: 1, name: 'Test' }]]); + executeQuery.mockResolvedValue([{ id: 1, name: 'Test' }]); sanitizeFilename.mockReturnValue('test_output_recipientIds_1-2-3'); generateFlagString.mockReturnValue('recipientIds_1-2-3'); - currentUserId.mockResolvedValue(Promise.resolve(user.id)); - userById.mockResolvedValue(Promise.resolve(user)); + currentUserId.mockResolvedValue(user.id); + userById.mockResolvedValue(user); Generic.mockImplementation(() => ({ filterRegions: jest.fn((ids) => ids.filter((id) => id <= 3)), getAllAccessibleRegions: jest.fn(() => [1, 2, 3]), @@ -124,7 +141,7 @@ describe('API Endpoints', () => { .send({ recipientIds: [1, 2, 3, 4] }); expect(response.status).toBe(200); - expect(response.headers['content-type']).toBe('text/csv'); + expect(response.headers['content-type']).toBe('text/csv; charset=utf-8'); expect(response.headers['content-disposition']).toBe('attachment; filename="test_output_recipientIds_1-2-3.csv"'); }); From aabe721fc64405e5d753e405164ab08db1519f84 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 20 Jun 2024 11:55:43 -0700 Subject: [PATCH 13/60] fix tests --- src/services/ssdi.test.js | 2 +- src/services/ssdi.ts | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js index cef69861a6..061aeda361 100644 --- a/src/services/ssdi.test.js +++ b/src/services/ssdi.test.js @@ -194,7 +194,7 @@ describe('ssdi', () => { it('should throw an error if the query is not a string', async () => { const invalidQuery = 123; - await expect(executeQuery(invalidQuery as unknown as string)).rejects.toThrow('The query must be a string'); + await expect(executeQuery(invalidQuery)).rejects.toThrow('The query must be a string'); }); it('should set the transaction to READ ONLY', async () => { diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index b871ba759b..5c2ceec370 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -109,16 +109,15 @@ const readFlagsAndQueryFromFile = ( defaultOutputName = defaultOutputNameMatch[1].trim(); } } else { - queryLines.push(line); + queryLines.push(line.trim()); // Trim spaces from each query line } }); // Join the query lines and remove leading and trailing blank lines const query = queryLines - .map((line) => line.replace(/--.*$/, '')) + .map((line) => line.replace(/--.*$/, '').trim()) .join('\n') - .replace(/^\s*\n|\s*\n$/g, '') - .replace(/^\s*/, ''); + .replace(/^\s*\n|\s*\n$/g, ''); const cachedQuery: CachedQuery = { 
flags, query, defaultOutputName }; // Cache the parsed query data @@ -127,6 +126,7 @@ const readFlagsAndQueryFromFile = ( return cachedQuery; }; + // Function to validate the type of the flag values const validateType = ( expectedType: string, From baef4f02792b5288bebe47118de82b1b3943c26e Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 20 Jun 2024 12:00:13 -0700 Subject: [PATCH 14/60] Update ssdi.ts --- src/services/ssdi.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index 5c2ceec370..d088e39ded 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -126,7 +126,6 @@ const readFlagsAndQueryFromFile = ( return cachedQuery; }; - // Function to validate the type of the flag values const validateType = ( expectedType: string, From d12a6e9ee5a3764bf0ef2957645d64ca99c96147 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 20 Jun 2024 13:00:38 -0700 Subject: [PATCH 15/60] Update handlers.ts --- src/routes/ssdi/handlers.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/routes/ssdi/handlers.ts b/src/routes/ssdi/handlers.ts index 70ed1ccf65..10e4aaefc3 100644 --- a/src/routes/ssdi/handlers.ts +++ b/src/routes/ssdi/handlers.ts @@ -63,6 +63,14 @@ const runQuery = async (req: Request, res: Response) => { res.status(400).send('Script path is required'); return; } + if (scriptPath.includes('../')) { + res.status(400).json({ error: 'Invalid script path: Path traversal detected' }); + return; + } + if (!scriptPath.startsWith('src/queries/')) { + res.status(400).json({ error: 'Invalid script path: all scripts are located within "src/queries/"' }); + return; + } try { const { flags, query, defaultOutputName } = readFlagsAndQueryFromFile(scriptPath); From 5eff653c0a33fce60daef09f3cb5c01d0a503999 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 20 Jun 2024 13:49:48 -0700 Subject: [PATCH 16/60] more tests to reach 100% coverage --- src/routes/ssdi/handlers.test.js | 21 ++++++++++++++ src/services/ssdi.test.js | 50 ++++++++++++++++++++++++++++++++ src/services/ssdi.ts | 2 ++ 3 files changed, 73 insertions(+) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 9bdc24d937..65fc0adc22 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -188,5 +188,26 @@ describe('API Endpoints', () => { expect(response.status).toBe(500); expect(response.text).toBe('Error executing query: Error reading query'); }); + + it('should filter out non-integer regionIds', async () => { + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path' }) + .send({ regionIds: [1, 'a', 2, 'b', 3] }); + + expect(response.status).toBe(200); + expect(policy.filterRegions).toHaveBeenCalledWith([1, 2, 3]); + }); + + it('should filter regionIds using policy', async () => { + const response = await request(app) + .post('/runQuery') + .query({ path: 'src/queries/test/path' }) + .send({ regionIds: [1, 2, 3, 4] }); + + expect(response.status).toBe(200); + expect(policy.filterRegions).toHaveBeenCalledWith([1, 2, 3, 4]); + expect(response.body).toEqual([{ id: 1, name: 'Test' }]); + }); }); }); diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js index 061aeda361..8f3ef4d036 100644 --- a/src/services/ssdi.test.js +++ b/src/services/ssdi.test.js @@ -2,6 +2,8 @@ import fs from 'fs'; import path from 'path'; import db from '../models'; import { + queryFileCache, + queryDataCache, listQueryFiles, readNameAndDescriptionFromFile, readFlagsAndQueryFromFile, 
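The exports above are the same surface the route handler chains together. A minimal sketch of driving the service directly, outside Express, follows; the import path, script path, and flag values are illustrative assumptions, and error handling is omitted.

    // Illustrative only: mirrors the readFlags -> validate -> setFlags -> executeQuery
    // sequence that runQuery in src/routes/ssdi/handlers.ts performs.
    import {
      readFlagsAndQueryFromFile,
      validateFlagValues,
      setFlags,
      executeQuery,
    } from '../../services/ssdi';

    async function runReport() {
      const scriptPath = 'src/queries/communication-logs.sql'; // hypothetical input
      const { flags, query, defaultOutputName } = readFlagsAndQueryFromFile(scriptPath);
      const flagValues = { regionIds: [1, 2], communicationDate: ['2023-10-01', '2023-12-31'] };
      validateFlagValues(flags, flagValues); // expected to throw if a value does not match its declared type
      await setFlags(flags, flagValues); // applies the ssdi.* settings the query filters on
      const rows = await executeQuery(query); // executes under SET TRANSACTION READ ONLY
      return { defaultOutputName, rows };
    }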
@@ -22,6 +24,12 @@ jest.mock('../models', () => ({ }, })); +// Clear the caches before each test +beforeEach(() => { + queryFileCache.clear(); + queryDataCache.clear(); +}); + describe('ssdi', () => { describe('readNameAndDescriptionFromFile', () => { it('should read name, description, and default output name from file', () => { @@ -80,6 +88,25 @@ describe('ssdi', () => { }, ]); }); + + it('should return cached query file data if available', () => { + fs.readdirSync.mockReturnValue(['file1.sql']); + const cachedQueryFile = { + name: 'CachedName', + description: 'Cached description', + filePath: 'test/directory/file1.sql', + defaultOutputName: 'cached_output', + }; + + // Set the cache with the cachedQueryFile data + queryFileCache.set('test/directory/file1.sql', cachedQueryFile); + + const result = listQueryFiles('test/directory'); + expect(result).toEqual([cachedQueryFile]); + + // Ensure fs.readFileSync is not called since data is from cache + expect(fs.readFileSync).not.toHaveBeenCalled(); + }); }); describe('readFlagsAndQueryFromFile', () => { @@ -107,6 +134,29 @@ describe('ssdi', () => { defaultOutputName: 'test_output', }); }); + + it('should return cached query data if available', () => { + const cachedQuery = { + flags: { + flag1: { + type: 'integer[]', + name: 'flag1', + description: 'Cached Flag description', + }, + }, + query: 'SELECT * FROM cached_table;', + defaultOutputName: 'cached_output', + }; + + // Set the cache with the cachedQuery data + queryDataCache.set('test/path.sql', cachedQuery); + + const result = readFlagsAndQueryFromFile('test/path.sql'); + expect(result).toEqual(cachedQuery); + + // Ensure fs.readFileSync is not called since data is from cache + expect(fs.readFileSync).not.toHaveBeenCalled(); + }); }); describe('validateType', () => { diff --git a/src/services/ssdi.ts b/src/services/ssdi.ts index d088e39ded..2f5509736a 100644 --- a/src/services/ssdi.ts +++ b/src/services/ssdi.ts @@ -204,6 +204,8 @@ export { Flags, FlagValues, QueryFile, + queryFileCache, + queryDataCache, listQueryFiles, readNameAndDescriptionFromFile, readFlagsAndQueryFromFile, From 345939e2872a810a9f5cbb16f5ff7ee2c344590f Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Fri, 21 Jun 2024 17:28:38 -0700 Subject: [PATCH 17/60] Update handlers.test.js --- src/routes/ssdi/handlers.test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index 65fc0adc22..cd3429d1fd 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -196,7 +196,7 @@ describe('API Endpoints', () => { .send({ regionIds: [1, 'a', 2, 'b', 3] }); expect(response.status).toBe(200); - expect(policy.filterRegions).toHaveBeenCalledWith([1, 2, 3]); + expect(Generic().filterRegions).toHaveBeenCalledWith([1, 2, 3]); }); it('should filter regionIds using policy', async () => { @@ -206,7 +206,7 @@ describe('API Endpoints', () => { .send({ regionIds: [1, 2, 3, 4] }); expect(response.status).toBe(200); - expect(policy.filterRegions).toHaveBeenCalledWith([1, 2, 3, 4]); + expect(Generic().filterRegions).toHaveBeenCalledWith([1, 2, 3, 4]); expect(response.body).toEqual([{ id: 1, name: 'Test' }]); }); }); From 9d17fd80de886a7cb4b27c61a9c7ac217b0e3576 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 26 Jun 2024 17:02:21 -0700 Subject: [PATCH 18/60] first pass --- src/lib/apiErrorHandler.js | 89 ++++++++++++++++++++++ src/lib/mailer/index.js | 120 +++++++++++++++++++++++++----- src/lib/maintenance/common.js | 9 ++- 
src/services/resourceQueue.js | 3 +- src/services/s3Queue.js | 3 +- src/services/scanQueue.js | 3 +- src/workers/trasnactionWrapper.ts | 43 +++++++++++ 7 files changed, 249 insertions(+), 21 deletions(-) create mode 100644 src/workers/trasnactionWrapper.ts diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 83fdd161e6..8658addd57 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -141,3 +141,92 @@ export default async function handleErrors(req, res, error, logContext) { handleUnexpectedErrorInCatchBlock(req, res, e, logContext); } } + +const logWorkerError = async (job, operation, error, logContext) => { + if ( + operation !== 'SequelizeError' + && process.env.SUPPRESS_ERROR_LOGGING + && process.env.SUPPRESS_ERROR_LOGGING.toLowerCase() === 'true' + ) { + return 0; + } + + try { + const responseBody = typeof error === 'object' + && error !== null ? { ...error, errorStack: error?.stack } : error; + + const requestBody = { + ...(job.data + && typeof job.data === 'object' + && Object.keys(job.data).length > 0 + && { data: job.data }), + }; + + const requestErrorId = await createRequestError({ + operation, + uri: job.queue.name, // Assuming the queue name serves as a URI equivalent + method: 'PROCESS_JOB', // Indicating this is a job processing operation + requestBody, + responseBody, + responseCode: 'INTERNAL_SERVER_ERROR', + }); + + return requestErrorId; + } catch (e) { + logger.error(`${logContext.namespace} - Sequelize error - unable to store RequestError - ${e}`); + } + + return null; +}; + +export const handleWorkerError = async (job, error, logContext) => { + if (process.env.NODE_ENV === 'development') { + logger.error(error); + } + + let operation; + let label; + + if (error instanceof Sequelize.Error) { + operation = 'SequelizeError'; + label = 'Sequelize error'; + } else { + operation = 'UNEXPECTED_ERROR'; + label = 'UNEXPECTED ERROR'; + } + + if (error instanceof Sequelize.ConnectionError + || error instanceof Sequelize.ConnectionAcquireTimeoutError) { + logger.error(`${logContext.namespace} Connection Pool: ${JSON.stringify(sequelize.connectionManager.pool)}`); + } + + const requestErrorId = await logWorkerError(job, operation, error, logContext); + + let errorMessage; + + if (error?.stack) { + errorMessage = error.stack; + } else { + errorMessage = error; + } + + if (requestErrorId) { + logger.error(`${logContext.namespace} - id: ${requestErrorId} ${label} - ${errorMessage}`); + } else { + logger.error(`${logContext.namespace} - ${label} - ${errorMessage}`); + } + + // Handle job failure as needed +}; + +export const handleUnexpectedWorkerError = (job, error, logContext) => { + logger.error(`${logContext.namespace} - Unexpected error in catch block - ${error}`); +}; + +export const handleWorkerErrors = async (job, error, logContext) => { + try { + await handleWorkerError(job, error, logContext); + } catch (e) { + handleUnexpectedWorkerError(job, e, logContext); + } +}; diff --git a/src/lib/mailer/index.js b/src/lib/mailer/index.js index de4b2091de..5708665733 100644 --- a/src/lib/mailer/index.js +++ b/src/lib/mailer/index.js @@ -17,6 +17,7 @@ import { } from '../../services/activityReports'; import { userById } from '../../services/users'; import logEmailNotification from './logNotifications'; +import transactionQueueWrapper from '../../workers/transactionWrapper'; export const notificationQueue = newQueue('notifications'); @@ -992,51 +993,136 @@ export const processNotificationQueue = () => { notificationQueue.on('completed', 
onCompletedNotification); increaseListeners(notificationQueue, 10); - notificationQueue.process(EMAIL_ACTIONS.NEEDS_ACTION, notifyChangesRequested); - notificationQueue.process(EMAIL_ACTIONS.SUBMITTED, notifyApproverAssigned); - notificationQueue.process(EMAIL_ACTIONS.APPROVED, notifyReportApproved); - notificationQueue.process(EMAIL_ACTIONS.COLLABORATOR_ADDED, notifyCollaboratorAssigned); - notificationQueue.process(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, notifyRecipientReportApproved); + notificationQueue.process( + EMAIL_ACTIONS.NEEDS_ACTION, + (job) => transactionQueueWrapper( + notifyApproverAssigned(job), + EMAIL_ACTIONS.NEEDS_ACTION, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.SUBMITTED, + (job) => transactionQueueWrapper( + notifyApproverAssigned(job), + EMAIL_ACTIONS.SUBMITTED, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.APPROVED, + (job) => transactionQueueWrapper( + notifyApproverAssigned(job), + EMAIL_ACTIONS.APPROVED, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.COLLABORATOR_ADDED, + (job) => transactionQueueWrapper( + notifyApproverAssigned(job), + EMAIL_ACTIONS.COLLABORATOR_ADDED, + ), + ); + + notificationQueue.process( + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, + (job) => transactionQueueWrapper( + notifyApproverAssigned(job), + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, + ), + ); - notificationQueue.process(EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.SUBMITTED_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.APPROVED_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.COLLABORATOR_DIGEST, notifyDigest); - notificationQueue.process(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, notifyDigest); + notificationQueue.process( + EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, + (job) => transactionQueueWrapper( + notifyDigest(job), + EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.SUBMITTED_DIGEST, + (job) => transactionQueueWrapper( + notifyDigest(job), + EMAIL_ACTIONS.SUBMITTED_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.APPROVED_DIGEST, + (job) => transactionQueueWrapper( + notifyDigest(job), + EMAIL_ACTIONS.APPROVED_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.COLLABORATOR_DIGEST, + (job) => transactionQueueWrapper( + notifyDigest(job), + EMAIL_ACTIONS.COLLABORATOR_DIGEST, + ), + ); + notificationQueue.process( + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, + (job) => transactionQueueWrapper( + notifyDigest(job), + EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, + ), + ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + 
EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, + ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, - sendTrainingReportNotification, + (job) => transactionQueueWrapper( + sendTrainingReportNotification(job), + EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, + ), ); }; diff --git a/src/lib/maintenance/common.js b/src/lib/maintenance/common.js index 3f4849eddd..c04ecd9d79 100644 --- a/src/lib/maintenance/common.js +++ b/src/lib/maintenance/common.js @@ -5,6 +5,7 @@ const { MaintenanceLog } = require('../../models'); const { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } = require('../../constants'); const { auditLogger, logger } = require('../../logger'); const { default: LockManager } = require('../lockManager'); +import transactionQueueWrapper from '../../workers/transactionWrapper'; const maintenanceQueue = newQueue('maintenance'); const maintenanceQueueProcessors = {}; @@ -103,7 +104,13 @@ const processMaintenanceQueue = () => { // Process each category in the queue using its corresponding processor Object.entries(maintenanceQueueProcessors) - .map(([category, processor]) => maintenanceQueue.process(category, processor)); + .map(([category, processor]) => maintenanceQueue.process( + category, + (job) => transactionQueueWrapper( + processor(job), + category, + ), + )); }; /** diff --git a/src/services/resourceQueue.js b/src/services/resourceQueue.js index 1a00fdc107..53befb9dc6 100644 --- a/src/services/resourceQueue.js +++ b/src/services/resourceQueue.js @@ -2,6 +2,7 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { RESOURCE_ACTIONS } from '../constants'; import { logger, auditLogger } from '../logger'; import { getResourceMetaDataJob } from '../lib/resource'; +import transactionQueueWrapper from '../workers/transactionWrapper'; const resourceQueue = newQueue('resource'); @@ -47,7 +48,7 @@ const processResourceQueue = () => { // Get resource metadata. resourceQueue.process( RESOURCE_ACTIONS.GET_METADATA, - getResourceMetaDataJob, + (job) => transactionQueueWrapper(getResourceMetaDataJob(job), RESOURCE_ACTIONS.GET_METADATA), ); }; diff --git a/src/services/s3Queue.js b/src/services/s3Queue.js index 05c02a233d..cfa1abac1b 100644 --- a/src/services/s3Queue.js +++ b/src/services/s3Queue.js @@ -2,6 +2,7 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { S3_ACTIONS } from '../constants'; import { logger, auditLogger } from '../logger'; import { deleteFileFromS3Job } from '../lib/s3'; +import transactionQueueWrapper from '../workers/transactionWrapper'; const s3Queue = newQueue('s3'); @@ -33,7 +34,7 @@ const processS3Queue = () => { // Delete S3 file. 
s3Queue.process( S3_ACTIONS.DELETE_FILE, - deleteFileFromS3Job, + (job) => transactionQueueWrapper(deleteFileFromS3Job(job), S3_ACTIONS.DELETE_FILE), ); }; diff --git a/src/services/scanQueue.js b/src/services/scanQueue.js index ca4a4b7aac..34e9fe461c 100644 --- a/src/services/scanQueue.js +++ b/src/services/scanQueue.js @@ -1,6 +1,7 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { logger, auditLogger } from '../logger'; import processFile from '../workers/files'; +import transactionQueueWrapper from '../workers/transactionWrapper'; const scanQueue = newQueue('scan'); const addToScanQueue = (fileKey) => { @@ -35,7 +36,7 @@ const processScanQueue = () => { scanQueue.on('failed', onFailedScanQueue); scanQueue.on('completed', onCompletedScanQueue); increaseListeners(scanQueue); - scanQueue.process((job) => processFile(job.data.key)); + scanQueue.process((job) => transactionQueueWrapper(processFile(job.data.key))); }; export { diff --git a/src/workers/trasnactionWrapper.ts b/src/workers/trasnactionWrapper.ts new file mode 100644 index 0000000000..4a56eb9c0a --- /dev/null +++ b/src/workers/trasnactionWrapper.ts @@ -0,0 +1,43 @@ +import { Sequelize } from 'sequelize'; +import { addAuditTransactionSettings, removeFromAuditedTransactions } from '../models/auditModelGenerator'; +import { sequelize } from '../models'; +import { handleWorkerErrors } from '../lib/apiErrorHandler'; +import { auditLogger } from '../logger'; + +const namespace = 'WORKER:WRAPPER'; +const logContext = { + namespace, +}; + +type Job = any; // Define the correct type for your job here + +const transactionQueueWrapper = ( + originalFunction: (job: Job) => Promise, + context = '', +) => async (job: Job): Promise => { + let error: Error | undefined; + const startTime = Date.now(); + try { + return sequelize.transaction(async () => { + let result; + try { + // eslint-disable-next-line + await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); + result = await originalFunction(job); + const duration = Date.now() - startTime; + auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); + removeFromAuditedTransactions(); + } catch (err) { + auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${(err as Error).message}`); + error = err as Error; + throw err; + } + return result; + }); + } catch (err) { + await handleWorkerErrors(job, error || err, logContext); + throw error || err; + } +}; + +export default transactionQueueWrapper; From 5bb93406de6c6d046599945f67eeaf1d067e6aea Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 26 Jun 2024 22:07:58 -0700 Subject: [PATCH 19/60] lint fixes --- src/lib/maintenance/common.js | 2 +- src/workers/{trasnactionWrapper.ts => transactionWrapper.ts} | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) rename src/workers/{trasnactionWrapper.ts => transactionWrapper.ts} (88%) diff --git a/src/lib/maintenance/common.js b/src/lib/maintenance/common.js index c04ecd9d79..c9629a9d93 100644 --- a/src/lib/maintenance/common.js +++ b/src/lib/maintenance/common.js @@ -5,7 +5,7 @@ const { MaintenanceLog } = require('../../models'); const { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } = require('../../constants'); const { auditLogger, logger } = require('../../logger'); const { default: LockManager } = require('../lockManager'); -import transactionQueueWrapper from '../../workers/transactionWrapper'; +const { default: transactionQueueWrapper } = require('../../workers/transactionWrapper'); const 
maintenanceQueue = newQueue('maintenance'); const maintenanceQueueProcessors = {}; diff --git a/src/workers/trasnactionWrapper.ts b/src/workers/transactionWrapper.ts similarity index 88% rename from src/workers/trasnactionWrapper.ts rename to src/workers/transactionWrapper.ts index 4a56eb9c0a..e0b64d71ce 100644 --- a/src/workers/trasnactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -9,11 +9,14 @@ const logContext = { namespace, }; +// eslint-disable-next-line @typescript-eslint/no-explicit-any type Job = any; // Define the correct type for your job here const transactionQueueWrapper = ( + // eslint-disable-next-line @typescript-eslint/no-explicit-any originalFunction: (job: Job) => Promise, context = '', + // eslint-disable-next-line @typescript-eslint/no-explicit-any ) => async (job: Job): Promise => { let error: Error | undefined; const startTime = Date.now(); From 03d72202ee2213f4cf096d117cb1b65025c0b2b3 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 27 Jun 2024 00:50:47 -0700 Subject: [PATCH 20/60] correct use --- src/lib/mailer/index.js | 68 +++++++++++++++--------------- src/lib/maintenance/common.js | 4 +- src/lib/maintenance/common.test.js | 21 +++++++-- src/services/resourceQueue.js | 5 ++- src/services/s3Queue.js | 5 ++- src/services/scanQueue.js | 3 +- 6 files changed, 63 insertions(+), 43 deletions(-) diff --git a/src/lib/mailer/index.js b/src/lib/mailer/index.js index 5708665733..81717eccef 100644 --- a/src/lib/mailer/index.js +++ b/src/lib/mailer/index.js @@ -995,132 +995,132 @@ export const processNotificationQueue = () => { notificationQueue.process( EMAIL_ACTIONS.NEEDS_ACTION, - (job) => transactionQueueWrapper( - notifyApproverAssigned(job), + transactionQueueWrapper( + notifyApproverAssigned, EMAIL_ACTIONS.NEEDS_ACTION, ), ); notificationQueue.process( EMAIL_ACTIONS.SUBMITTED, - (job) => transactionQueueWrapper( - notifyApproverAssigned(job), + transactionQueueWrapper( + notifyApproverAssigned, EMAIL_ACTIONS.SUBMITTED, ), ); notificationQueue.process( EMAIL_ACTIONS.APPROVED, - (job) => transactionQueueWrapper( - notifyApproverAssigned(job), + transactionQueueWrapper( + notifyApproverAssigned, EMAIL_ACTIONS.APPROVED, ), ); notificationQueue.process( EMAIL_ACTIONS.COLLABORATOR_ADDED, - (job) => transactionQueueWrapper( - notifyApproverAssigned(job), + transactionQueueWrapper( + notifyApproverAssigned, EMAIL_ACTIONS.COLLABORATOR_ADDED, ), ); notificationQueue.process( EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, - (job) => transactionQueueWrapper( - notifyApproverAssigned(job), + transactionQueueWrapper( + notifyApproverAssigned, EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, ), ); notificationQueue.process( EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, - (job) => transactionQueueWrapper( - notifyDigest(job), + transactionQueueWrapper( + notifyDigest, EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, ), ); notificationQueue.process( EMAIL_ACTIONS.SUBMITTED_DIGEST, - (job) => transactionQueueWrapper( - notifyDigest(job), + transactionQueueWrapper( + notifyDigest, EMAIL_ACTIONS.SUBMITTED_DIGEST, ), ); notificationQueue.process( EMAIL_ACTIONS.APPROVED_DIGEST, - (job) => transactionQueueWrapper( - notifyDigest(job), + transactionQueueWrapper( + notifyDigest, EMAIL_ACTIONS.APPROVED_DIGEST, ), ); notificationQueue.process( EMAIL_ACTIONS.COLLABORATOR_DIGEST, - (job) => transactionQueueWrapper( - notifyDigest(job), + transactionQueueWrapper( + notifyDigest, EMAIL_ACTIONS.COLLABORATOR_DIGEST, ), ); notificationQueue.process( EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, - (job) => 
transactionQueueWrapper( - notifyDigest(job), + transactionQueueWrapper( + notifyDigest, EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, ), ); notificationQueue.process( EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, - (job) => transactionQueueWrapper( - sendTrainingReportNotification(job), + transactionQueueWrapper( + sendTrainingReportNotification, EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, ), ); diff --git a/src/lib/maintenance/common.js b/src/lib/maintenance/common.js index c9629a9d93..60aac879fc 100644 --- a/src/lib/maintenance/common.js +++ b/src/lib/maintenance/common.js @@ -106,8 +106,8 @@ const processMaintenanceQueue = () => { Object.entries(maintenanceQueueProcessors) .map(([category, processor]) => maintenanceQueue.process( category, - (job) => transactionQueueWrapper( - processor(job), + transactionQueueWrapper( + processor, category, ), )); diff --git a/src/lib/maintenance/common.test.js b/src/lib/maintenance/common.test.js index d4623cc750..471a165ff7 100644 --- a/src/lib/maintenance/common.test.js +++ b/src/lib/maintenance/common.test.js @@ -21,6 +21,7 @@ const { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } = require('../../constants'); const { MaintenanceLog } = require('../../models'); const { auditLogger, logger } = require('../../logger'); +const { default: transactionWrapper } = require('../../workers/transactionWrapper'); jest.mock('../../models', () => ({ MaintenanceLog: { @@ -113,13 +114,25 @@ describe('Maintenance Queue', () => { addQueueProcessor(category1, processor1); addQueueProcessor(category2, processor2); processMaintenanceQueue(); + expect(maintenanceQueue.process).toHaveBeenCalledTimes(3); - expect(maintenanceQueue.process).toHaveBeenCalledWith(category1, processor1); - expect(maintenanceQueue.process).toHaveBeenCalledWith(category2, processor2); expect(maintenanceQueue.process) - .toHaveBeenCalledWith( + .toHaveBeenNthCalledWith( + 1, MAINTENANCE_CATEGORY.MAINTENANCE, - maintenance, 
+ expect.any(Function), + ); + expect(maintenanceQueue.process) + .toHaveBeenNthCalledWith( + 2, + category1, + expect.any(Function), + ); + expect(maintenanceQueue.process) + .toHaveBeenNthCalledWith( + 3, + category2, + expect.any(Function), ); }); }); diff --git a/src/services/resourceQueue.js b/src/services/resourceQueue.js index 53befb9dc6..eee5f74fa2 100644 --- a/src/services/resourceQueue.js +++ b/src/services/resourceQueue.js @@ -48,7 +48,10 @@ const processResourceQueue = () => { // Get resource metadata. resourceQueue.process( RESOURCE_ACTIONS.GET_METADATA, - (job) => transactionQueueWrapper(getResourceMetaDataJob(job), RESOURCE_ACTIONS.GET_METADATA), + transactionQueueWrapper( + getResourceMetaDataJob, + RESOURCE_ACTIONS.GET_METADATA, + ), ); }; diff --git a/src/services/s3Queue.js b/src/services/s3Queue.js index cfa1abac1b..446cbb6c4c 100644 --- a/src/services/s3Queue.js +++ b/src/services/s3Queue.js @@ -34,7 +34,10 @@ const processS3Queue = () => { // Delete S3 file. s3Queue.process( S3_ACTIONS.DELETE_FILE, - (job) => transactionQueueWrapper(deleteFileFromS3Job(job), S3_ACTIONS.DELETE_FILE), + transactionQueueWrapper( + deleteFileFromS3Job, + S3_ACTIONS.DELETE_FILE, + ), ); }; diff --git a/src/services/scanQueue.js b/src/services/scanQueue.js index 34e9fe461c..dc502ba125 100644 --- a/src/services/scanQueue.js +++ b/src/services/scanQueue.js @@ -36,7 +36,8 @@ const processScanQueue = () => { scanQueue.on('failed', onFailedScanQueue); scanQueue.on('completed', onCompletedScanQueue); increaseListeners(scanQueue); - scanQueue.process((job) => transactionQueueWrapper(processFile(job.data.key))); + const process = (job) => processFile(job.data.key); + scanQueue.process(transactionQueueWrapper(process)); }; export { From efbf45c5844fbfb2bb04408104681b5453b78718 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Mon, 1 Jul 2024 07:36:16 -0700 Subject: [PATCH 21/60] Update communication-logs.sql --- src/queries/communication-logs.sql | 42 ++++++++++++++++-------------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/src/queries/communication-logs.sql b/src/queries/communication-logs.sql index 907ebe7856..24df2445b8 100644 --- a/src/queries/communication-logs.sql +++ b/src/queries/communication-logs.sql @@ -1,15 +1,19 @@ /** +* @name: Communication Log Report +* @description: A comprehensive report of communication logs based on several criteria. +* @defaultOutputName: communication_log_report +* * This query collects all the communication logs based on several criteria. * -* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values +* The query results are filterable by the SSDI flags. All SSDI flags are passed as an array of values. 
* The following are the available flags within this script: -* - ssdi.regionIds - one or more values for 1 through 12 -* - ssdi.recipients - one or more verbatim recipient names -* - ssdi.users - one or more verbatim user names -* - ssdi.role - one or more verbatim role names -* - ssdi.method - one or more verbatim method names -* - ssdi.communicationDate - two dates defining a range for the communicationDate to be within -* - ssdi.uei - one or more verbatim UEI values +* - ssdi.regionIds - integer[] - one or more values for 1 through 12 +* - ssdi.recipients - text[] - one or more verbatim recipient names +* - ssdi.users - text[] - one or more verbatim user names +* - ssdi.role - text[] - one or more verbatim role names +* - ssdi.method - text[] - one or more verbatim method names +* - ssdi.communicationDate - date[] - two dates defining a range for the communicationDate to be within +* - ssdi.uei - text[] - one or more verbatim UEI values * * Zero or more SSDI flags can be set within the same transaction as the query is executed. * The following is an example of how to set a SSDI flag: @@ -19,17 +23,17 @@ SELECT r.name, r.uei, - u.name "user", - STRING_AGG(DISTINCT rr.name, ' ') "roles", + u.name AS "user", + STRING_AGG(DISTINCT rr.name, ' ') AS "roles", cl."createdAt", - COALESCE(cl.data ->> 'method', '') method, - cl.data ->> 'result' result, - COALESCE(cl.data ->> 'purpose', '') purpose, - COALESCE(cl.data ->> 'duration', '') duration, - COALESCE(cl.data ->> 'regionId', '') region, - to_date(cl.data ->> 'communicationDate', 'MM/DD/YYYY') "communicationDate", - COALESCE(cl.data ->> 'pocComplete', '') "pocComplete", - COALESCE(cl.data ->> 'notes', '') "notes", + COALESCE(cl.data ->> 'method', '') AS "method", + cl.data ->> 'result' AS "result", + COALESCE(cl.data ->> 'purpose', '') AS "purpose", + COALESCE(cl.data ->> 'duration', '') AS "duration", + COALESCE(cl.data ->> 'regionId', '') AS "region", + to_date(cl.data ->> 'communicationDate', 'MM/DD/YYYY') AS "communicationDate", + COALESCE(cl.data ->> 'pocComplete', '') AS "pocComplete", + COALESCE(cl.data ->> 'notes', '') AS "notes", COALESCE(( SELECT jsonb_agg(elem) FROM jsonb_array_elements(cl.data -> 'recipientNextSteps') elem @@ -107,4 +111,4 @@ AND cl.data -> 'pageState' ->> '1' = 'Complete' AND cl.data -> 'pageState' ->> '2' = 'Complete' AND cl.data -> 'pageState' ->> '3' = 'Complete' GROUP BY 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 -ORDER BY 1,11; +ORDER BY 1, 11; From ffab8c4f2d435f80e98b5128ab40c2d6f35817cd Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Mon, 1 Jul 2024 16:34:46 -0700 Subject: [PATCH 22/60] plumbing referenceData --- src/lib/mailer/index.js | 19 +++++++++++ src/lib/maintenance/common.js | 3 +- src/lib/uuidConverter.test.js | 39 +++++++++++++++++++++++ src/lib/uuidConverter.ts | 24 ++++++++++++++ src/services/resourceQueue.js | 2 ++ src/services/s3Queue.js | 2 ++ src/services/scanQueue.js | 6 +++- src/worker.ts | 31 ++++++++++-------- src/workers/referenceData.ts | 28 +++++++++++++++++ src/workers/transactionWrapper.ts | 52 ++++++++++++++++++------------- 10 files changed, 169 insertions(+), 37 deletions(-) create mode 100644 src/lib/uuidConverter.test.js create mode 100644 src/lib/uuidConverter.ts create mode 100644 src/workers/referenceData.ts diff --git a/src/lib/mailer/index.js b/src/lib/mailer/index.js index 81717eccef..412e882969 100644 --- a/src/lib/mailer/index.js +++ b/src/lib/mailer/index.js @@ -1,4 +1,5 @@ /* eslint-disable @typescript-eslint/return-await */ +import httpContext from 
'express-http-context'; import { createTransport } from 'nodemailer'; import { uniq } from 'lodash'; import { QueryTypes } from 'sequelize'; @@ -18,6 +19,7 @@ import { import { userById } from '../../services/users'; import logEmailNotification from './logNotifications'; import transactionQueueWrapper from '../../workers/transactionWrapper'; +import referenceData from '../../workers/referenceData'; export const notificationQueue = newQueue('notifications'); @@ -356,6 +358,7 @@ export const collaboratorAssignedNotification = (report, newCollaborators) => { const data = { report, newCollaborator: collaborator.user, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.COLLABORATOR_ADDED, data); } catch (err) { @@ -371,6 +374,7 @@ export const approverAssignedNotification = (report, newApprovers) => { const data = { report, newApprover: approver, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.SUBMITTED, data); } catch (err) { @@ -386,6 +390,7 @@ export const reportApprovedNotification = (report, authorWithSetting, collabsWit report, authorWithSetting, collabsWithSettings, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.APPROVED, data); } catch (err) { @@ -409,6 +414,7 @@ export const programSpecialistRecipientReportApprovedNotification = ( report, programSpecialists, recipients, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED, data); } catch (err) { @@ -483,6 +489,7 @@ export const trVisionAndGoalComplete = async (event) => { emailTo: [user.email], debugMessage: `MAILER: Notifying ${user.email} that a POC completed work on TR ${event.id} | ${eId}`, templatePath: 'tr_poc_vision_goal_complete', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_POC_VISION_GOAL_COMPLETE, data); @@ -515,6 +522,7 @@ export const trPocSessionComplete = async (event) => { emailTo: [user.email], debugMessage: `MAILER: Notifying ${user.email} that a POC completed work on TR ${event.id}`, templatePath: 'tr_poc_session_complete', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_POC_SESSION_COMPLETE, data); @@ -551,6 +559,7 @@ export const trSessionCreated = async (event) => { ...event, displayId: eventId, }, + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_SESSION_CREATED, data); @@ -583,6 +592,7 @@ export const trSessionCompleted = async (event) => { emailTo: [user.email], debugMessage: `MAILER: Notifying ${user.email} that a session was completed for TR ${event.id}`, templatePath: 'tr_session_completed', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_SESSION_COMPLETED, data); })); @@ -620,6 +630,7 @@ export const trCollaboratorAdded = async ( emailTo: [collaborator.email], templatePath: 'tr_collaborator_added', debugMessage: `MAILER: Notifying ${collaborator.email} that they were added as a collaborator to TR ${report.id}`, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_COLLABORATOR_ADDED, data); @@ -652,6 +663,7 @@ export const trPocAdded = async ( emailTo: [poc.email], debugMessage: `MAILER: Notifying ${poc.email} that they were added as a collaborator to TR ${report.id}`, templatePath: 'tr_poc_added', + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_POC_ADDED, data); @@ -687,6 +699,7 @@ export const trPocEventComplete = async ( reportPath, debugMessage: `MAILER: Notifying ${user.email} that TR ${event.id} is complete`, templatePath: 
'tr_event_complete', + ...referenceData(), }; return notificationQueue.add(EMAIL_ACTIONS.TRAINING_REPORT_EVENT_COMPLETED, data); @@ -709,6 +722,7 @@ export const changesRequestedNotification = ( approver, authorWithSetting, collabsWithSettings, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.NEEDS_ACTION, data); } catch (err) { @@ -743,6 +757,7 @@ export async function collaboratorDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.COLLABORATOR_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.COLLABORATOR_DIGEST, data); return data; @@ -780,6 +795,7 @@ export async function changesRequestedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.NEEDS_ACTION_DIGEST, data); @@ -818,6 +834,7 @@ export async function submittedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.SUBMITTED_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.SUBMITTED_DIGEST, data); @@ -857,6 +874,7 @@ export async function approvedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.APPROVED_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.APPROVED_DIGEST, data); @@ -919,6 +937,7 @@ export async function recipientApprovedDigest(freq, subjectFreq) { type: EMAIL_ACTIONS.RECIPIENT_APPROVED_DIGEST, freq, subjectFreq, + ...referenceData(), }; notificationQueue.add(EMAIL_ACTIONS.RECIPIENT_APPROVED_DIGEST, data); diff --git a/src/lib/maintenance/common.js b/src/lib/maintenance/common.js index 60aac879fc..5bcb5239c4 100644 --- a/src/lib/maintenance/common.js +++ b/src/lib/maintenance/common.js @@ -6,6 +6,7 @@ const { MAINTENANCE_TYPE, MAINTENANCE_CATEGORY } = require('../../constants'); const { auditLogger, logger } = require('../../logger'); const { default: LockManager } = require('../lockManager'); const { default: transactionQueueWrapper } = require('../../workers/transactionWrapper'); +const { default: referenceData } = require('../../workers/referenceData'); const maintenanceQueue = newQueue('maintenance'); const maintenanceQueueProcessors = {}; @@ -131,7 +132,7 @@ const enqueueMaintenanceJob = async ( if (category in maintenanceQueueProcessors) { try { // Add the job to the maintenance queue - maintenanceQueue.add(category, data); + maintenanceQueue.add(category, { ...data, ...referenceData() }); } catch (err) { // Log any errors that occur when adding the job to the queue auditLogger.error(err); diff --git a/src/lib/uuidConverter.test.js b/src/lib/uuidConverter.test.js new file mode 100644 index 0000000000..62f839b6b7 --- /dev/null +++ b/src/lib/uuidConverter.test.js @@ -0,0 +1,39 @@ +// uuidConverter.test.ts +import { validate as validateUUID } from 'uuid'; +import convertToUUID from './uuidConverter'; + +describe('convertToUUID', () => { + it('should return a valid UUID when given a numeric string', () => { + const jobId = '123'; + const uuid = convertToUUID(jobId); + expect(validateUUID(uuid)).toBe(true); + }); + + it('should return the same UUID when given a valid UUID', () => { + const jobId = '550e8400-e29b-41d4-a716-446655440000'; + const uuid = convertToUUID(jobId); + expect(uuid).toBe(jobId); + }); + + it('should return a valid UUID when given a custom string', () => { + const jobId = 'custom-job-id-123'; + const uuid = convertToUUID(jobId); + expect(validateUUID(uuid)).toBe(true); + }); + + it('should return different UUIDs for different numeric strings', () => { + const jobId1 = '123'; + const 
jobId2 = '124'; + const uuid1 = convertToUUID(jobId1); + const uuid2 = convertToUUID(jobId2); + expect(uuid1).not.toBe(uuid2); + }); + + it('should return different UUIDs for different custom strings', () => { + const jobId1 = 'custom-job-id-123'; + const jobId2 = 'custom-job-id-124'; + const uuid1 = convertToUUID(jobId1); + const uuid2 = convertToUUID(jobId2); + expect(uuid1).not.toBe(uuid2); + }); +}); diff --git a/src/lib/uuidConverter.ts b/src/lib/uuidConverter.ts new file mode 100644 index 0000000000..94429b9b69 --- /dev/null +++ b/src/lib/uuidConverter.ts @@ -0,0 +1,24 @@ +import { v5 as uuidv5, validate as validateUUID, v4 as uuidv4 } from 'uuid'; + +const UUID_NAMESPACE = uuidv4(); // You can use a fixed namespace UUID + +/** + * Converts a given job ID to a UUID format. + * @param jobId - The job ID to be converted. + * @returns The UUID formatted job ID. + */ +export default function convertToUUID(jobId: string): string { + // Check if jobId is already a valid UUID + if (validateUUID(jobId)) { + return jobId; + } + + // Check if jobId is a numeric string + if (/^\d+$/.test(jobId)) { + // Convert the numeric string to a UUID using a namespace + return uuidv5(jobId, UUID_NAMESPACE); + } + + // If jobId is a custom string, generate a UUID based on the string + return uuidv5(jobId, UUID_NAMESPACE); +} diff --git a/src/services/resourceQueue.js b/src/services/resourceQueue.js index eee5f74fa2..5c957500cd 100644 --- a/src/services/resourceQueue.js +++ b/src/services/resourceQueue.js @@ -3,6 +3,7 @@ import { RESOURCE_ACTIONS } from '../constants'; import { logger, auditLogger } from '../logger'; import { getResourceMetaDataJob } from '../lib/resource'; import transactionQueueWrapper from '../workers/transactionWrapper'; +import referenceData from '../workers/referenceData'; const resourceQueue = newQueue('resource'); @@ -18,6 +19,7 @@ const addGetResourceMetadataToQueue = async (id, url) => { resourceId: id, resourceUrl: url, key: RESOURCE_ACTIONS.GET_METADATA, + ...referenceData(), }; return resourceQueue.add( RESOURCE_ACTIONS.GET_METADATA, diff --git a/src/services/s3Queue.js b/src/services/s3Queue.js index 446cbb6c4c..e28a4730b6 100644 --- a/src/services/s3Queue.js +++ b/src/services/s3Queue.js @@ -3,6 +3,7 @@ import { S3_ACTIONS } from '../constants'; import { logger, auditLogger } from '../logger'; import { deleteFileFromS3Job } from '../lib/s3'; import transactionQueueWrapper from '../workers/transactionWrapper'; +import referenceData from '../workers/referenceData'; const s3Queue = newQueue('s3'); @@ -12,6 +13,7 @@ const addDeleteFileToQueue = (id, key) => { fileId: id, fileKey: key, key: S3_ACTIONS.DELETE_FILE, + ...referenceData(), }; s3Queue.add(S3_ACTIONS.DELETE_FILE, data); return data; diff --git a/src/services/scanQueue.js b/src/services/scanQueue.js index dc502ba125..3fa1575f5b 100644 --- a/src/services/scanQueue.js +++ b/src/services/scanQueue.js @@ -2,6 +2,7 @@ import newQueue, { increaseListeners } from '../lib/queue'; import { logger, auditLogger } from '../logger'; import processFile from '../workers/files'; import transactionQueueWrapper from '../workers/transactionWrapper'; +import referenceData from '../workers/referenceData'; const scanQueue = newQueue('scan'); const addToScanQueue = (fileKey) => { @@ -13,7 +14,10 @@ const addToScanQueue = (fileKey) => { }; return scanQueue.add( - fileKey, + { + ...fileKey, + ...referenceData(), + }, { attempts: retries, backoff: backOffOpts, diff --git a/src/worker.ts b/src/worker.ts index a7807f9767..c471c81baf 100644 --- 
a/src/worker.ts +++ b/src/worker.ts @@ -6,6 +6,7 @@ if (process.env.NODE_ENV === 'production') { import {} from 'dotenv/config'; import throng from 'throng'; +import httpContext from 'express-http-context'; import { processScanQueue, } from './services/scanQueue'; @@ -28,25 +29,29 @@ import { // Number of workers to spawn const workers = process.env.WORKER_CONCURRENCY || 2; -// Pull jobs off the redis queue and process them. +// Wrap your process functions to use httpContext async function start(context: { id: number }) { - // File Scanning Queue - processScanQueue(); + httpContext.ns.run(() => { + httpContext.set('workerId', context.id); - // AWS Elasticsearch Queue - processAWSElasticsearchQueue(); + // File Scanning Queue + processScanQueue(); - // S3 Queue. - processS3Queue(); + // AWS Elasticsearch Queue + processAWSElasticsearchQueue(); - // Resource Queue. - processResourceQueue(); + // S3 Queue. + processS3Queue(); - // Notifications Queue - processNotificationQueue(); + // Resource Queue. + processResourceQueue(); - // Maintenance Queue - processMaintenanceQueue(); + // Notifications Queue + processNotificationQueue(); + + // Maintenance Queue + processMaintenanceQueue(); + }); } // spawn workers and start them diff --git a/src/workers/referenceData.ts b/src/workers/referenceData.ts new file mode 100644 index 0000000000..ea21b58ee6 --- /dev/null +++ b/src/workers/referenceData.ts @@ -0,0 +1,28 @@ +import httpContext from 'express-http-context'; + +interface ReferenceData { + referenceData: { + userId: number | undefined; + impersonationId: number | undefined; + transactionId: string | undefined; + sessionSig: string | undefined; + } +} + +const referenceData = (): ReferenceData => { + const userId = httpContext.get('loggedUser') as number | undefined; + const impersonationId = httpContext.get('impersonationUserId') as number | undefined; + const transactionId = httpContext.get('transactionId') as string | undefined; + const sessionSig = httpContext.get('sessionSig') as string | undefined; + + return { + referenceData: { + userId, + impersonationId, + transactionId, + sessionSig, + }, + }; +}; + +export default referenceData; diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index e0b64d71ce..244c57db09 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -1,4 +1,5 @@ -import { Sequelize } from 'sequelize'; +import httpContext from 'express-http-context'; +import convertToUUID from '../lib/uuidConverter'; import { addAuditTransactionSettings, removeFromAuditedTransactions } from '../models/auditModelGenerator'; import { sequelize } from '../models'; import { handleWorkerErrors } from '../lib/apiErrorHandler'; @@ -20,27 +21,34 @@ const transactionQueueWrapper = ( ) => async (job: Job): Promise => { let error: Error | undefined; const startTime = Date.now(); - try { - return sequelize.transaction(async () => { - let result; - try { - // eslint-disable-next-line - await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); - result = await originalFunction(job); - const duration = Date.now() - startTime; - auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); - removeFromAuditedTransactions(); - } catch (err) { - auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${(err as Error).message}`); - error = err as Error; - throw err; - } - return result; - }); - } catch (err) { - await handleWorkerErrors(job, error || err, 
logContext); - throw error || err; - } + return httpContext.ns.runPromise(async () => { + httpContext.set('loggedUser', job.userId); + httpContext.set('impersonationUserId', job.impersonationUserId); + httpContext.set('transactionId', convertToUUID(job.id)); + httpContext.set('sessionSig', job.id); // TODO: what value should be used here + httpContext.set('auditDescriptor', originalFunction.name); + try { + return sequelize.transaction(async () => { + let result; + try { + // eslint-disable-next-line + await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); + result = await originalFunction(job); + const duration = Date.now() - startTime; + auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); + removeFromAuditedTransactions(); + } catch (err) { + auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${(err as Error).message}`); + error = err as Error; + throw err; + } + return result; + }); + } catch (err) { + await handleWorkerErrors(job, error || err, logContext); + throw error || err; + } + }); }; export default transactionQueueWrapper; From 9fb9baa72bf9e12e1d36a89f03413d0269570072 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Mon, 1 Jul 2024 23:31:57 -0700 Subject: [PATCH 23/60] more fixes --- src/lib/maintenance/common.test.js | 10 +++++++++- src/workers/transactionWrapper.ts | 4 ++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/lib/maintenance/common.test.js b/src/lib/maintenance/common.test.js index 471a165ff7..f3f0a87f5b 100644 --- a/src/lib/maintenance/common.test.js +++ b/src/lib/maintenance/common.test.js @@ -142,7 +142,15 @@ describe('Maintenance Queue', () => { jest.clearAllMocks(); }); it('should add a job to the maintenance queue if a processor is defined for the given category', () => { - const data = { test: 'enqueueMaintenanceJob - should add a job to the maintenance queue if a processor is defined for the given category' }; + const data = { + test: 'enqueueMaintenanceJob - should add a job to the maintenance queue if a processor is defined for the given category', + referenceData: { + impersonationId: undefined, + sessionSig: undefined, + transactionId: undefined, + userId: undefined, + }, + }; const category = 'test-category'; const processor = jest.fn(); addQueueProcessor(category, processor); diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index 244c57db09..bbdf60a85e 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -22,8 +22,8 @@ const transactionQueueWrapper = ( let error: Error | undefined; const startTime = Date.now(); return httpContext.ns.runPromise(async () => { - httpContext.set('loggedUser', job.userId); - httpContext.set('impersonationUserId', job.impersonationUserId); + httpContext.set('loggedUser', job.referenceData.userId); + httpContext.set('impersonationUserId', job.referenceData.impersonationUserId); httpContext.set('transactionId', convertToUUID(job.id)); httpContext.set('sessionSig', job.id); // TODO: what value should be used here httpContext.set('auditDescriptor', originalFunction.name); From 5a7f3e1020b61bfc79a73a00d8afaa29082f7497 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 2 Jul 2024 01:55:12 -0700 Subject: [PATCH 24/60] more fixes --- src/lib/apiErrorHandler.js | 111 +++++++++++++------------- src/lib/apiErrorHandler.test.js | 30 ++++++- src/lib/mailer/index.test.js | 133 +++++++++++++++++--------------- 3 files changed, 152 insertions(+), 122 
deletions(-) diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 8658addd57..f8ab936cfe 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -13,7 +13,6 @@ import { sequelize } from '../models'; * @returns {Promise} - The ID of the stored request error, or null if storing failed. */ async function logRequestError(req, operation, error, logContext) { - // Check if error logging should be suppressed if ( operation !== 'SequelizeError' && process.env.SUPPRESS_ERROR_LOGGING @@ -23,27 +22,16 @@ async function logRequestError(req, operation, error, logContext) { } try { - // Prepare the response body for storage - const responseBody = typeof error === 'object' - && error !== null ? { ...error, errorStack: error?.stack } : error; + const responseBody = typeof error === 'object' && error !== null + ? { ...error, errorStack: error?.stack } + : error; - // Prepare the request body for storage const requestBody = { - ...(req.body - && typeof req.body === 'object' - && Object.keys(req.body).length > 0 - && { body: req.body }), - ...(req.params - && typeof req.params === 'object' - && Object.keys(req.params).length > 0 - && { params: req.params }), - ...(req.query - && typeof req.query === 'object' - && Object.keys(req.query).length > 0 - && { query: req.query }), + ...(req.body && typeof req.body === 'object' && Object.keys(req.body).length > 0 && { body: req.body }), + ...(req.params && typeof req.params === 'object' && Object.keys(req.params).length > 0 && { params: req.params }), + ...(req.query && typeof req.query === 'object' && Object.keys(req.query).length > 0 && { query: req.query }), }; - // Create a request error in the database and get its ID const requestErrorId = await createRequestError({ operation, uri: req.originalUrl, @@ -69,7 +57,6 @@ async function logRequestError(req, operation, error, logContext) { * @param {Object} logContext - The context for logging. 
*/ export const handleError = async (req, res, error, logContext) => { - // Check if the environment is development if (process.env.NODE_ENV === 'development') { logger.error(error); } @@ -77,7 +64,6 @@ export const handleError = async (req, res, error, logContext) => { let operation; let label; - // Check if the error is an instance of Sequelize.Error if (error instanceof Sequelize.Error) { operation = 'SequelizeError'; label = 'Sequelize error'; @@ -86,24 +72,15 @@ export const handleError = async (req, res, error, logContext) => { label = 'UNEXPECTED ERROR'; } - // eslint-disable-next-line max-len - if (error instanceof Sequelize.ConnectionError || error instanceof Sequelize.ConnectionAcquireTimeoutError) { + if (error instanceof Sequelize.ConnectionError + || error instanceof Sequelize.ConnectionAcquireTimeoutError) { logger.error(`${logContext.namespace} Connection Pool: ${JSON.stringify(sequelize.connectionManager.pool)}`); } - // Log the request error and get the error ID const requestErrorId = await logRequestError(req, operation, error, logContext); - let errorMessage; - - // Check if the error has a stack property - if (error?.stack) { - errorMessage = error.stack; - } else { - errorMessage = error; - } + const errorMessage = error?.stack || error; - // Log the error message with the error ID if available if (requestErrorId) { logger.error(`${logContext.namespace} - id: ${requestErrorId} ${label} - ${errorMessage}`); } else { @@ -114,12 +91,11 @@ export const handleError = async (req, res, error, logContext) => { }; /** - * Handles any unexpected errors in an error handler catch block - * - * @param {*} req - request - * @param {*} res - response - * @param {*} error - error - * @param {*} logContext - useful data for logging + * Handles any unexpected errors in an error handler catch block. + * @param {Object} req - The request object. + * @param {Object} res - The response object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. */ export function handleUnexpectedErrorInCatchBlock(req, res, error, logContext) { logger.error(`${logContext.namespace} - Unexpected error in catch block - ${error}`); @@ -128,11 +104,10 @@ export function handleUnexpectedErrorInCatchBlock(req, res, error, logContext) { /** * Handles API errors. Saves data in the RequestErrors table and sends 500 error. - * - * @param {*} req - request - * @param {*} res - response - * @param {*} error - error - * @param {*} logContext - useful data for logging + * @param {Object} req - The request object. + * @param {Object} res - The response object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. */ export default async function handleErrors(req, res, error, logContext) { try { @@ -142,6 +117,14 @@ export default async function handleErrors(req, res, error, logContext) { } } +/** + * Logs a worker error and stores it in the database. + * @param {Object} job - The job object. + * @param {string} operation - The operation name. + * @param {Error} error - The error object. + * @param {Object} logContext - The logging context. + * @returns {Promise} - The ID of the stored request error, or null if storing failed. + */ const logWorkerError = async (job, operation, error, logContext) => { if ( operation !== 'SequelizeError' @@ -152,23 +135,21 @@ const logWorkerError = async (job, operation, error, logContext) => { } try { - const responseBody = typeof error === 'object' - && error !== null ? 
{ ...error, errorStack: error?.stack } : error; + const responseBody = typeof error === 'object' && error !== null + ? { ...error, errorStack: error?.stack } + : error; const requestBody = { - ...(job.data - && typeof job.data === 'object' - && Object.keys(job.data).length > 0 - && { data: job.data }), + ...(job.data && typeof job.data === 'object' && Object.keys(job.data).length > 0 && { data: job.data }), }; const requestErrorId = await createRequestError({ operation, - uri: job.queue.name, // Assuming the queue name serves as a URI equivalent - method: 'PROCESS_JOB', // Indicating this is a job processing operation + uri: job.queue.name, + method: 'PROCESS_JOB', requestBody, responseBody, - responseCode: 'INTERNAL_SERVER_ERROR', + responseCode: INTERNAL_SERVER_ERROR, }); return requestErrorId; @@ -179,6 +160,12 @@ const logWorkerError = async (job, operation, error, logContext) => { return null; }; +/** + * Handles errors in a worker job. + * @param {Object} job - The job object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. + */ export const handleWorkerError = async (job, error, logContext) => { if (process.env.NODE_ENV === 'development') { logger.error(error); @@ -202,13 +189,7 @@ export const handleWorkerError = async (job, error, logContext) => { const requestErrorId = await logWorkerError(job, operation, error, logContext); - let errorMessage; - - if (error?.stack) { - errorMessage = error.stack; - } else { - errorMessage = error; - } + const errorMessage = error?.stack || error; if (requestErrorId) { logger.error(`${logContext.namespace} - id: ${requestErrorId} ${label} - ${errorMessage}`); @@ -219,10 +200,22 @@ export const handleWorkerError = async (job, error, logContext) => { // Handle job failure as needed }; +/** + * Handles any unexpected errors in a worker error handler catch block. + * @param {Object} job - The job object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. + */ export const handleUnexpectedWorkerError = (job, error, logContext) => { logger.error(`${logContext.namespace} - Unexpected error in catch block - ${error}`); }; +/** + * Handles worker job errors. Logs the error and stores it in the database. + * @param {Object} job - The job object. + * @param {Error} error - The error object. + * @param {Object} logContext - The context for logging. 
+ */ export const handleWorkerErrors = async (job, error, logContext) => { try { await handleWorkerError(job, error, logContext); diff --git a/src/lib/apiErrorHandler.test.js b/src/lib/apiErrorHandler.test.js index 59506f6e7e..ca3970f67d 100644 --- a/src/lib/apiErrorHandler.test.js +++ b/src/lib/apiErrorHandler.test.js @@ -54,6 +54,7 @@ describe('apiErrorHandler', () => { const requestErrors = await RequestErrors.findAll(); expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); }); it('handles a generic error', async () => { @@ -65,9 +66,10 @@ describe('apiErrorHandler', () => { const requestErrors = await RequestErrors.findAll(); expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('UNEXPECTED_ERROR'); }); - it('can handle unexpected error in catch block', async () => { + it('handles unexpected error in catch block', async () => { const mockUnexpectedErr = new Error('Unexpected error'); handleUnexpectedErrorInCatchBlock(mockRequest, mockResponse, mockUnexpectedErr, mockLogContext); @@ -77,4 +79,30 @@ describe('apiErrorHandler', () => { expect(requestErrors.length).toBe(0); }); + + it('handles error suppression when SUPPRESS_ERROR_LOGGING is true', async () => { + process.env.SUPPRESS_ERROR_LOGGING = 'true'; + await handleErrors(mockRequest, mockResponse, mockSequelizeError, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + + delete process.env.SUPPRESS_ERROR_LOGGING; + }); + + it('logs connection pool information on connection errors', async () => { + const mockConnectionError = new Sequelize.ConnectionError(new Error('Connection error')); + await handleErrors(mockRequest, mockResponse, mockConnectionError, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + // Ensure that connection pool information is logged + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); + }); }); diff --git a/src/lib/mailer/index.test.js b/src/lib/mailer/index.test.js index 9562612b43..788f983e16 100644 --- a/src/lib/mailer/index.test.js +++ b/src/lib/mailer/index.test.js @@ -1,5 +1,4 @@ import { createTransport } from 'nodemailer'; -import { REPORT_STATUSES } from '@ttahub/common'; import { notifyCollaboratorAssigned, notifyApproverAssigned, @@ -121,7 +120,7 @@ const mockRecipient = { const reportObject = { activityRecipientType: 'recipient', - submissionStatus: REPORT_STATUSES.DRAFT, + submissionStatus: 'Draft', userId: mockUser.id, regionId: 1, lastUpdatedById: mockUser.id, @@ -131,8 +130,7 @@ const reportObject = { const submittedReport = { ...reportObject, activityRecipients: [{ grantId: 1 }], - submissionStatus: REPORT_STATUSES.SUBMITTED, - // calculatedStatus: REPORT_STATUSES.SUBMITTED, + submissionStatus: 'Submitted', numberOfParticipants: 1, deliveryMethod: 'method', duration: 0, @@ -170,9 +168,10 @@ describe('mailer tests', () => { process.env = oldEnv; await db.sequelize.close(); }); + describe('Changes requested by manager', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyChangesRequested({ data: { report: mockReport, @@ -189,12 +188,12 @@ describe('mailer tests', () => { ]); const message = 
JSON.parse(email.message); expect(message.subject).toBe(`Activity Report ${mockReport.displayId}: Changes requested`); - expect(message.text).toContain(`${mockManager.name} requested changed to report ${mockReport.displayId}.`); + expect(message.text).toContain(`${mockManager.name} requested changes to report ${mockReport.displayId}.`); expect(message.text).toContain(mockApprover.note); expect(message.text).toContain(reportPath); }); it('Tests that an email is not sent if no recipients', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyChangesRequested({ data: { report: mockReport, @@ -206,19 +205,20 @@ describe('mailer tests', () => { expect(email).toBe(null); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyChangesRequested({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyChangesRequested({ data: { report: mockReport }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Report Approved', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyReportApproved({ data: { report: mockReport, - approver: mockApprover, authorWithSetting: mockReport.author, collabsWithSettings: [mockCollaborator1, mockCollaborator2], }, @@ -235,11 +235,10 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that an email is not sent if no recipients', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyReportApproved({ data: { report: mockReport, - approver: mockApprover, authorWithSetting: null, collabsWithSettings: [], }, @@ -247,15 +246,17 @@ describe('mailer tests', () => { expect(email).toBe(null); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyReportApproved({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyReportApproved({ data: { report: mockReport }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Program Specialists: Recipient Report Approved', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyRecipientReportApproved({ data: { report: mockReport, @@ -272,7 +273,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that an email is not sent if no program specialists/recipients', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyRecipientReportApproved({ data: { report: mockReport, @@ -283,19 +284,21 @@ describe('mailer tests', () => { expect(email).toBe(null); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyRecipientReportApproved({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyRecipientReportApproved({ data: { report: mockReport, programSpecialists: [mockProgramSpecialist], recipients: [mockRecipient], }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Manager 
Approval Requested', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyApproverAssigned({ data: { report: mockReport, newApprover: mockApprover }, }, jsonTransport); @@ -309,15 +312,17 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - expect(notifyApproverAssigned({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyApproverAssigned({ data: { report: mockReport }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); + describe('Add Collaborators', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyCollaboratorAssigned({ data: { report: mockReport, newCollaborator: mockNewCollaborator }, }, jsonTransport); @@ -331,16 +336,17 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - expect(notifyCollaboratorAssigned({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyCollaboratorAssigned({ data: { report: mockReport, newCollaborator: mockCollaborator1 }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); describe('sendTrainingReportNotification', () => { it('Tests that an email is sent', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; process.env.CI = ''; const data = { emailTo: [mockNewCollaborator.email], @@ -366,7 +372,7 @@ describe('mailer tests', () => { expect(message.text).toContain('/asdf/'); }); it('Honors no send', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; process.env.CI = ''; const data = { emailTo: [`no-send_${mockNewCollaborator.email}`], @@ -385,7 +391,7 @@ describe('mailer tests', () => { expect(email).toBeNull(); }); it('Tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; + process.env.SEND_NOTIFICATIONS = 'false'; const data = { emailTo: [mockNewCollaborator.email], templatePath: 'tr_session_completed', @@ -397,14 +403,15 @@ describe('mailer tests', () => { displayId: 'mockReport-1', }, }; - await expect(sendTrainingReportNotification({ + const email = await sendTrainingReportNotification({ data, - }, jsonTransport)).resolves.toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); it('Tests that emails are not sent on CI', async () => { - process.env.SEND_NOTIFICATIONS = true; - process.env.CI = true; + process.env.SEND_NOTIFICATIONS = 'true'; + process.env.CI = 'true'; const data = { emailTo: [mockNewCollaborator.email], templatePath: 'tr_session_completed', @@ -416,15 +423,16 @@ describe('mailer tests', () => { displayId: 'mockReport-1', }, }; - await expect(sendTrainingReportNotification({ + const email = await sendTrainingReportNotification({ data, - }, jsonTransport)).resolves.toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); describe('Collaborators digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await 
notifyDigest({ data: { user: mockNewCollaborator, @@ -451,7 +459,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -476,7 +484,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -501,7 +509,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no new collaborator notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -523,21 +531,22 @@ describe('mailer tests', () => { }); it('tests that emails are not sent without SEND_NOTIFICATIONS', async () => { - process.env.SEND_NOTIFICATIONS = false; - await expect(notifyDigest({ + process.env.SEND_NOTIFICATIONS = 'false'; + const email = await notifyDigest({ data: { user: mockNewCollaborator, reports: [], type: EMAIL_ACTIONS.COLLABORATOR_DIGEST, freq: EMAIL_DIGEST_FREQ.DAILY, }, - }, jsonTransport)).toBeNull(); + }, jsonTransport); + expect(email).toBeNull(); }); }); describe('Changes requested digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -564,7 +573,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -589,7 +598,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -614,7 +623,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no changes requested notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -638,7 +647,7 @@ describe('mailer tests', () => { describe('Submitted digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -665,7 +674,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -690,7 +699,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); 
}); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -715,7 +724,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no submitted notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -739,7 +748,7 @@ describe('mailer tests', () => { describe('Approved digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -766,7 +775,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -791,7 +800,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -816,7 +825,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no approved reports notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { user: mockNewCollaborator, @@ -840,7 +849,7 @@ describe('mailer tests', () => { describe('Program Specialist: Report approved digest', () => { it('tests that an email is sent for a daily setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [mockReport], @@ -865,7 +874,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a weekly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [mockReport], @@ -890,7 +899,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent for a monthly setting', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [mockReport], @@ -915,7 +924,7 @@ describe('mailer tests', () => { expect(message.text).toContain(reportPath); }); it('tests that an email is sent if there are no approved reports notifications', async () => { - process.env.SEND_NOTIFICATIONS = true; + process.env.SEND_NOTIFICATIONS = 'true'; const email = await notifyDigest({ data: { reports: [], From 2bac37335408e0a4768eb9c39ea06470207b0463 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 2 Jul 2024 12:57:15 -0700 Subject: [PATCH 25/60] Update transactionWrapper.js --- src/routes/transactionWrapper.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/routes/transactionWrapper.js b/src/routes/transactionWrapper.js index 
226ee61dc4..29694d8aa6 100644 --- a/src/routes/transactionWrapper.js +++ b/src/routes/transactionWrapper.js @@ -1,3 +1,4 @@ +import httpContext from 'express-http-context'; import { sequelize } from '../models'; import { addAuditTransactionSettings, removeFromAuditedTransactions } from '../models/auditModelGenerator'; import handleErrors from '../lib/apiErrorHandler'; @@ -14,7 +15,8 @@ export default function transactionWrapper(originalFunction, context = '') { let error; const startTime = Date.now(); try { - return sequelize.transaction(async () => { + return sequelize.transaction(async (transaction) => { + httpContext.set('transactionId', transaction.id); let result; try { await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); From 2259e60b97636968a3ba3510768f4eae0cce9c4c Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 2 Jul 2024 12:58:13 -0700 Subject: [PATCH 26/60] Update transactionWrapper.ts --- src/workers/transactionWrapper.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index bbdf60a85e..61ee57a980 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -28,7 +28,8 @@ const transactionQueueWrapper = ( httpContext.set('sessionSig', job.id); // TODO: what value should be used here httpContext.set('auditDescriptor', originalFunction.name); try { - return sequelize.transaction(async () => { + return sequelize.transaction(async (transaction) => { + httpContext.set('transactionId', transaction.id); let result; try { // eslint-disable-next-line From 431ec193519a460cd0a68f1e3d90a819c9f5b0a0 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 2 Jul 2024 12:59:01 -0700 Subject: [PATCH 27/60] Update transactionWrapper.ts --- src/workers/transactionWrapper.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index 61ee57a980..6ac5907310 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -24,7 +24,6 @@ const transactionQueueWrapper = ( return httpContext.ns.runPromise(async () => { httpContext.set('loggedUser', job.referenceData.userId); httpContext.set('impersonationUserId', job.referenceData.impersonationUserId); - httpContext.set('transactionId', convertToUUID(job.id)); httpContext.set('sessionSig', job.id); // TODO: what value should be used here httpContext.set('auditDescriptor', originalFunction.name); try { From 10fb3cffd6447e864619adbf21ff12c34b310038 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 3 Jul 2024 16:09:28 -0700 Subject: [PATCH 28/60] Update index.test.js --- src/lib/mailer/index.test.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/lib/mailer/index.test.js b/src/lib/mailer/index.test.js index 788f983e16..2d5b528fb5 100644 --- a/src/lib/mailer/index.test.js +++ b/src/lib/mailer/index.test.js @@ -1,4 +1,5 @@ import { createTransport } from 'nodemailer'; +import { REPORT_STATUSES } from '@ttahub/common'; import { notifyCollaboratorAssigned, notifyApproverAssigned, @@ -120,7 +121,7 @@ const mockRecipient = { const reportObject = { activityRecipientType: 'recipient', - submissionStatus: 'Draft', + submissionStatus: REPORT_STATUSES.DRAFT, userId: mockUser.id, regionId: 1, lastUpdatedById: mockUser.id, @@ -130,7 +131,7 @@ const reportObject = { const submittedReport = { ...reportObject, activityRecipients: [{ grantId: 1 }], - submissionStatus: 'Submitted', + 
submissionStatus: REPORT_STATUSES.SUBMITTED, numberOfParticipants: 1, deliveryMethod: 'method', duration: 0, From 4e47f5927b45f1cd387cf067c14fbc65f3904c5f Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 4 Jul 2024 12:00:22 -0700 Subject: [PATCH 29/60] Update html.pug --- email_templates/changes_requested_by_manager/html.pug | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/email_templates/changes_requested_by_manager/html.pug b/email_templates/changes_requested_by_manager/html.pug index 2f5b2b8383..c12e4f7a82 100644 --- a/email_templates/changes_requested_by_manager/html.pug +++ b/email_templates/changes_requested_by_manager/html.pug @@ -2,7 +2,7 @@ style include ../email.css p Hello, p -p #{managerName} requested changed to report #{displayId}. +p #{managerName} requested changes to report #{displayId}. if comments p #{managerName} provided the following comments: blockquote !{comments} From 5fb61f91485125bee5d419eb4499c42df1333d4b Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 4 Jul 2024 23:59:38 -0700 Subject: [PATCH 30/60] wrong order --- src/routes/courses/handlers.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/routes/courses/handlers.ts b/src/routes/courses/handlers.ts index 5c97b894fd..4ebaccc83f 100644 --- a/src/routes/courses/handlers.ts +++ b/src/routes/courses/handlers.ts @@ -28,7 +28,7 @@ export async function allCourses(req: Request, res: Response) { const courses = await getAllCourses(); res.json(courses); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -38,7 +38,7 @@ export async function getCourseById(req: Request, res: Response) { const course = await getById(Number(id)); res.json(course); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -70,7 +70,7 @@ export async function updateCourseById(req: Request, res: Response) { res.json(newCourse); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -90,7 +90,7 @@ export async function createCourseByName(req: Request, res: Response) { res.json(course); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } @@ -119,7 +119,7 @@ export async function deleteCourseById(req: Request, res: Response) { } res.status(204).send(); } catch (err) { - await handleErrors(err, req, res, logContext); + await handleErrors(req, res, err, logContext); } } From 05c15e24f18d21ee1b54f036d9bfcffd3d7020ac Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 9 Jul 2024 13:45:22 -0700 Subject: [PATCH 31/60] fixes --- src/services/requestErrors.js | 17 ++++++++++------- src/workers/transactionWrapper.ts | 6 ++---- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/src/services/requestErrors.js b/src/services/requestErrors.js index 8d823e3502..85e8435abb 100644 --- a/src/services/requestErrors.js +++ b/src/services/requestErrors.js @@ -1,5 +1,3 @@ -import models from '../models'; - export default async function createRequestError({ operation, uri, @@ -8,11 +6,12 @@ export default async function createRequestError({ responseBody = 'N/A', responseCode = 'N/A', }) { + const { RequestErrors } = require('../models'); try { const requestErrorBody = { operation, uri, method, requestBody, responseBody, responseCode, }; - const requestError = await models.RequestErrors.create(requestErrorBody, { transaction: null 
}); + const requestError = await RequestErrors.create(requestErrorBody, { transaction: null }); return requestError.id; } catch (err) { throw new Error('Error creating RequestError entry'); @@ -20,12 +19,13 @@ export default async function createRequestError({ } export async function requestErrors({ filter = '{}', range = '[0,9]', sort = '["createdAt","DESC"]' } = {}) { + const { RequestErrors } = require('../models'); const offset = JSON.parse(range)[0]; const limit = JSON.parse(range)[1]; const order = JSON.parse(sort); const where = JSON.parse(filter); - return models.RequestErrors.findAndCountAll({ + return RequestErrors.findAndCountAll({ where, order: [order], offset, @@ -34,20 +34,23 @@ export async function requestErrors({ filter = '{}', range = '[0,9]', sort = '[" } export async function requestErrorById(id) { - return models.RequestErrors.findOne({ + const { RequestErrors } = require('../models'); + return RequestErrors.findOne({ where: { id }, }); } export async function requestErrorsByIds({ filter = '{}' } = {}) { - return models.RequestErrors.findAll({ + const { RequestErrors } = require('../models'); + return RequestErrors.findAll({ where: JSON.parse(filter), attributes: ['id'], }); } export async function delRequestErrors({ filter = '{}' } = {}) { - return models.RequestErrors.destroy({ + const { RequestErrors } = require('../models'); + return RequestErrors.destroy({ where: JSON.parse(filter), }); } diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index 6ac5907310..bf08762883 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -19,7 +19,6 @@ const transactionQueueWrapper = ( context = '', // eslint-disable-next-line @typescript-eslint/no-explicit-any ) => async (job: Job): Promise => { - let error: Error | undefined; const startTime = Date.now(); return httpContext.ns.runPromise(async () => { httpContext.set('loggedUser', job.referenceData.userId); @@ -39,14 +38,13 @@ const transactionQueueWrapper = ( removeFromAuditedTransactions(); } catch (err) { auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${(err as Error).message}`); - error = err as Error; throw err; } return result; }); } catch (err) { - await handleWorkerErrors(job, error || err, logContext); - throw error || err; + await handleWorkerErrors(job, err, logContext); + throw err; } }); }; From ee106912b8a3a7044db11ffc1bb858c640cc640b Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Tue, 9 Jul 2024 14:04:09 -0700 Subject: [PATCH 32/60] Update requestErrors.js --- src/services/requestErrors.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/services/requestErrors.js b/src/services/requestErrors.js index 85e8435abb..56dafd5696 100644 --- a/src/services/requestErrors.js +++ b/src/services/requestErrors.js @@ -6,6 +6,7 @@ export default async function createRequestError({ responseBody = 'N/A', responseCode = 'N/A', }) { + // eslint-disable-next-line global-require const { RequestErrors } = require('../models'); try { const requestErrorBody = { @@ -19,6 +20,7 @@ export default async function createRequestError({ } export async function requestErrors({ filter = '{}', range = '[0,9]', sort = '["createdAt","DESC"]' } = {}) { + // eslint-disable-next-line global-require const { RequestErrors } = require('../models'); const offset = JSON.parse(range)[0]; const limit = JSON.parse(range)[1]; @@ -34,6 +36,7 @@ export async function requestErrors({ filter = '{}', range = '[0,9]', sort = '[" } export async function 
requestErrorById(id) { + // eslint-disable-next-line global-require const { RequestErrors } = require('../models'); return RequestErrors.findOne({ where: { id }, @@ -41,6 +44,7 @@ export async function requestErrorById(id) { } export async function requestErrorsByIds({ filter = '{}' } = {}) { + // eslint-disable-next-line global-require const { RequestErrors } = require('../models'); return RequestErrors.findAll({ where: JSON.parse(filter), @@ -49,6 +53,7 @@ export async function requestErrorsByIds({ filter = '{}' } = {}) { } export async function delRequestErrors({ filter = '{}' } = {}) { + // eslint-disable-next-line global-require const { RequestErrors } = require('../models'); return RequestErrors.destroy({ where: JSON.parse(filter), From 971cf73e300d952e1a4cab40f082bc8c9db89e31 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 10 Jul 2024 08:32:15 -0700 Subject: [PATCH 33/60] fix more tests --- src/lib/apiErrorHandler.js | 8 +++- src/lib/apiErrorHandler.test.js | 78 +++++++++++++++++++++++++++++++-- 2 files changed, 81 insertions(+), 5 deletions(-) diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index f8ab936cfe..605db81b6e 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -13,6 +13,8 @@ import { sequelize } from '../models'; * @returns {Promise} - The ID of the stored request error, or null if storing failed. */ async function logRequestError(req, operation, error, logContext) { + console.log('process.env.SUPPRESS_ERROR_LOGGING', process.env.SUPPRESS_ERROR_LOGGING); + console.log('operation', operation); if ( operation !== 'SequelizeError' && process.env.SUPPRESS_ERROR_LOGGING @@ -20,6 +22,9 @@ async function logRequestError(req, operation, error, logContext) { ) { return 0; } + if (!error) { + return 0; + } try { const responseBody = typeof error === 'object' && error !== null @@ -74,9 +79,8 @@ export const handleError = async (req, res, error, logContext) => { if (error instanceof Sequelize.ConnectionError || error instanceof Sequelize.ConnectionAcquireTimeoutError) { - logger.error(`${logContext.namespace} Connection Pool: ${JSON.stringify(sequelize.connectionManager.pool)}`); + logger.error(`${logContext.namespace} Connection Pool: ${JSON.stringify(sequelize?.connectionManager?.pool)}`); } - const requestErrorId = await logRequestError(req, operation, error, logContext); const errorMessage = error?.stack || error; diff --git a/src/lib/apiErrorHandler.test.js b/src/lib/apiErrorHandler.test.js index ca3970f67d..75fdc50306 100644 --- a/src/lib/apiErrorHandler.test.js +++ b/src/lib/apiErrorHandler.test.js @@ -1,7 +1,7 @@ import Sequelize from 'sequelize'; import { INTERNAL_SERVER_ERROR } from 'http-codes'; import db, { RequestErrors } from '../models'; -import handleErrors, { handleUnexpectedErrorInCatchBlock } from './apiErrorHandler'; +import handleErrors, { handleUnexpectedErrorInCatchBlock, handleWorkerErrors, handleUnexpectedWorkerError } from './apiErrorHandler'; const mockUser = { id: 47, @@ -31,6 +31,11 @@ const mockResponse = { })), }; +const mockJob = { + data: { jobDetail: 'example job detail' }, + queue: { name: 'exampleQueue' }, +}; + const mockSequelizeError = new Sequelize.Error('Not all ok here'); const mockLogContext = { @@ -82,7 +87,8 @@ describe('apiErrorHandler', () => { it('handles error suppression when SUPPRESS_ERROR_LOGGING is true', async () => { process.env.SUPPRESS_ERROR_LOGGING = 'true'; - await handleErrors(mockRequest, mockResponse, mockSequelizeError, mockLogContext); + const mockGenericError = new 
Error('Unknown error'); + await handleErrors(mockRequest, mockResponse, mockGenericError, mockLogContext); expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); @@ -99,7 +105,73 @@ describe('apiErrorHandler', () => { expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); - // Ensure that connection pool information is logged + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); + }); + + it('handles worker errors', async () => { + const mockWorkerError = new Error('Worker error'); + await handleWorkerErrors(mockJob, mockWorkerError, mockLogContext); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('UNEXPECTED_ERROR'); + }); + + it('handles worker Sequelize errors', async () => { + const mockSequelizeWorkerError = new Sequelize.Error('Sequelize worker error'); + await handleWorkerErrors(mockJob, mockSequelizeWorkerError, mockLogContext); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).not.toBe(0); + expect(requestErrors[0].operation).toBe('SequelizeError'); + }); + + it('handles unexpected worker error in catch block', async () => { + const mockUnexpectedWorkerError = new Error('Unexpected worker error'); + handleUnexpectedWorkerError(mockJob, mockUnexpectedWorkerError, mockLogContext); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + }); + + it('handles null error', async () => { + await handleErrors(mockRequest, mockResponse, null, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + }); + + it('handles undefined error', async () => { + await handleErrors(mockRequest, mockResponse, undefined, mockLogContext); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + + const requestErrors = await RequestErrors.findAll(); + + expect(requestErrors.length).toBe(0); + }); + + it('handles specific Sequelize connection acquire timeout error', async () => { + const mockConnectionAcquireTimeoutError = new Sequelize + .ConnectionAcquireTimeoutError(new Error('Connection acquire timeout error')); + await handleErrors( + mockRequest, + mockResponse, + mockConnectionAcquireTimeoutError, + mockLogContext, + ); + + expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + const requestErrors = await RequestErrors.findAll(); expect(requestErrors.length).not.toBe(0); From 3e8f381c24a9d038a39afdd5048c3a55b8e42111 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 10 Jul 2024 09:15:42 -0700 Subject: [PATCH 34/60] Update apiErrorHandler.js --- src/lib/apiErrorHandler.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 605db81b6e..9f6b038151 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -13,8 +13,6 @@ import { sequelize } from '../models'; * @returns {Promise} - The ID of the stored request error, or null if storing failed. 
*/ async function logRequestError(req, operation, error, logContext) { - console.log('process.env.SUPPRESS_ERROR_LOGGING', process.env.SUPPRESS_ERROR_LOGGING); - console.log('operation', operation); if ( operation !== 'SequelizeError' && process.env.SUPPRESS_ERROR_LOGGING From 759c4e73cdac6b8f660b72b4528a9a351787fc17 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 10 Jul 2024 11:34:54 -0700 Subject: [PATCH 35/60] remove unused code --- src/lib/uuidConverter.test.js | 39 ------------------------------- src/lib/uuidConverter.ts | 24 ------------------- src/workers/transactionWrapper.ts | 1 - 3 files changed, 64 deletions(-) delete mode 100644 src/lib/uuidConverter.test.js delete mode 100644 src/lib/uuidConverter.ts diff --git a/src/lib/uuidConverter.test.js b/src/lib/uuidConverter.test.js deleted file mode 100644 index 62f839b6b7..0000000000 --- a/src/lib/uuidConverter.test.js +++ /dev/null @@ -1,39 +0,0 @@ -// uuidConverter.test.ts -import { validate as validateUUID } from 'uuid'; -import convertToUUID from './uuidConverter'; - -describe('convertToUUID', () => { - it('should return a valid UUID when given a numeric string', () => { - const jobId = '123'; - const uuid = convertToUUID(jobId); - expect(validateUUID(uuid)).toBe(true); - }); - - it('should return the same UUID when given a valid UUID', () => { - const jobId = '550e8400-e29b-41d4-a716-446655440000'; - const uuid = convertToUUID(jobId); - expect(uuid).toBe(jobId); - }); - - it('should return a valid UUID when given a custom string', () => { - const jobId = 'custom-job-id-123'; - const uuid = convertToUUID(jobId); - expect(validateUUID(uuid)).toBe(true); - }); - - it('should return different UUIDs for different numeric strings', () => { - const jobId1 = '123'; - const jobId2 = '124'; - const uuid1 = convertToUUID(jobId1); - const uuid2 = convertToUUID(jobId2); - expect(uuid1).not.toBe(uuid2); - }); - - it('should return different UUIDs for different custom strings', () => { - const jobId1 = 'custom-job-id-123'; - const jobId2 = 'custom-job-id-124'; - const uuid1 = convertToUUID(jobId1); - const uuid2 = convertToUUID(jobId2); - expect(uuid1).not.toBe(uuid2); - }); -}); diff --git a/src/lib/uuidConverter.ts b/src/lib/uuidConverter.ts deleted file mode 100644 index 94429b9b69..0000000000 --- a/src/lib/uuidConverter.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { v5 as uuidv5, validate as validateUUID, v4 as uuidv4 } from 'uuid'; - -const UUID_NAMESPACE = uuidv4(); // You can use a fixed namespace UUID - -/** - * Converts a given job ID to a UUID format. - * @param jobId - The job ID to be converted. - * @returns The UUID formatted job ID. 
- */ -export default function convertToUUID(jobId: string): string { - // Check if jobId is already a valid UUID - if (validateUUID(jobId)) { - return jobId; - } - - // Check if jobId is a numeric string - if (/^\d+$/.test(jobId)) { - // Convert the numeric string to a UUID using a namespace - return uuidv5(jobId, UUID_NAMESPACE); - } - - // If jobId is a custom string, generate a UUID based on the string - return uuidv5(jobId, UUID_NAMESPACE); -} diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index bf08762883..7b95c90845 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -1,5 +1,4 @@ import httpContext from 'express-http-context'; -import convertToUUID from '../lib/uuidConverter'; import { addAuditTransactionSettings, removeFromAuditedTransactions } from '../models/auditModelGenerator'; import { sequelize } from '../models'; import { handleWorkerErrors } from '../lib/apiErrorHandler'; From fd4efeac441a719fb5b42d53fa4716e4ccef002d Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 10 Jul 2024 12:47:32 -0700 Subject: [PATCH 36/60] adding await --- src/routes/transactionWrapper.js | 3 ++- src/workers/transactionWrapper.ts | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/routes/transactionWrapper.js b/src/routes/transactionWrapper.js index 29694d8aa6..74be9ccf98 100644 --- a/src/routes/transactionWrapper.js +++ b/src/routes/transactionWrapper.js @@ -15,7 +15,8 @@ export default function transactionWrapper(originalFunction, context = '') { let error; const startTime = Date.now(); try { - return sequelize.transaction(async (transaction) => { + // eslint-disable-next-line @typescript-eslint/return-await + return await sequelize.transaction(async (transaction) => { httpContext.set('transactionId', transaction.id); let result; try { diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index 7b95c90845..81dcf53ae1 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -25,7 +25,8 @@ const transactionQueueWrapper = ( httpContext.set('sessionSig', job.id); // TODO: what value should be used here httpContext.set('auditDescriptor', originalFunction.name); try { - return sequelize.transaction(async (transaction) => { + // eslint-disable-next-line @typescript-eslint/return-await + return await sequelize.transaction(async (transaction) => { httpContext.set('transactionId', transaction.id); let result; try { From 3c381be79148f15fe6c63eb970ea2e072cc7a762 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Wed, 10 Jul 2024 17:39:47 -0700 Subject: [PATCH 37/60] cleanup --- src/routes/transactionWrapper.js | 6 ++---- src/workers/transactionWrapper.ts | 2 +- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/src/routes/transactionWrapper.js b/src/routes/transactionWrapper.js index 74be9ccf98..e0a79379c6 100644 --- a/src/routes/transactionWrapper.js +++ b/src/routes/transactionWrapper.js @@ -12,7 +12,6 @@ const logContext = { export default function transactionWrapper(originalFunction, context = '') { return async function wrapper(req, res, next) { - let error; const startTime = Date.now(); try { // eslint-disable-next-line @typescript-eslint/return-await @@ -25,15 +24,14 @@ export default function transactionWrapper(originalFunction, context = '') { const duration = Date.now() - startTime; auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); removeFromAuditedTransactions(); + return result; } catch 
(err) { auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${err.message}`); - error = err; throw err; } - return result; }); } catch (err) { - return handleErrors(req, res, error || err, logContext); + return handleErrors(req, res, err, logContext); } }; } diff --git a/src/workers/transactionWrapper.ts b/src/workers/transactionWrapper.ts index 81dcf53ae1..45cc2512f5 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -36,11 +36,11 @@ const transactionQueueWrapper = ( const duration = Date.now() - startTime; auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); removeFromAuditedTransactions(); + return result; } catch (err) { auditLogger.error(`Error executing ${originalFunction.name} ${context}: ${(err as Error).message}`); throw err; } - return result; }); } catch (err) { await handleWorkerErrors(job, err, logContext); From de51a811226a28def7ae71c07b71203fd7d3c8a2 Mon Sep 17 00:00:00 2001 From: nvms Date: Thu, 11 Jul 2024 12:41:03 -0400 Subject: [PATCH 38/60] initial pass --- frontend/src/fetchers/activityReports.js | 5 ++ frontend/src/pages/ActivityReport/index.js | 4 +- src/routes/activityReports/handlers.js | 8 ++++ src/routes/activityReports/index.js | 2 + src/services/activityReports.js | 56 +++++++++++++++------- 5 files changed, 56 insertions(+), 19 deletions(-) diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js index 1569eb760f..392899455b 100644 --- a/frontend/src/fetchers/activityReports.js +++ b/frontend/src/fetchers/activityReports.js @@ -102,6 +102,11 @@ export const getRecipients = async (region) => { return recipients.json(); }; +export const getRecipientsForAR = async (region, reportId) => { + const recipients = await get(join(activityReportUrl, 'activity-recipients/', reportId, '/', `?region=${region}`)); + return recipients.json(); +}; + export const getGoals = async (grantIds) => { const params = grantIds.map((grantId) => `grantIds=${grantId}`); const url = join(activityReportUrl, 'goals', `?${params.join('&')}`); diff --git a/frontend/src/pages/ActivityReport/index.js b/frontend/src/pages/ActivityReport/index.js index 41bded708a..efac0b8777 100644 --- a/frontend/src/pages/ActivityReport/index.js +++ b/frontend/src/pages/ActivityReport/index.js @@ -34,7 +34,7 @@ import { submitReport, saveReport, getReport, - getRecipients, + getRecipientsForAR, createReport, getCollaborators, getApprovers, @@ -278,7 +278,7 @@ function ActivityReport({ } const apiCalls = [ - getRecipients(report.regionId), + getRecipientsForAR(report.regionId, reportId.current), getCollaborators(report.regionId), getApprovers(report.regionId), getGroupsForActivityReport(report.regionId), diff --git a/src/routes/activityReports/handlers.js b/src/routes/activityReports/handlers.js index a9b6ae6233..e2aeba2640 100644 --- a/src/routes/activityReports/handlers.js +++ b/src/routes/activityReports/handlers.js @@ -722,6 +722,14 @@ export async function getActivityRecipients(req, res) { res.json(activityRecipients); } +export async function getActivityRecipientsReportId(req, res) { + const { region } = req.query; + const { reportId } = req.params; + const targetRegion = parseInt(region, DECIMAL_BASE); + const activityRecipients = await possibleRecipients(targetRegion, reportId); + res.json(activityRecipients); +} + /** * Retrieve an activity report * diff --git a/src/routes/activityReports/index.js b/src/routes/activityReports/index.js index e787b72cd8..93f74a0ba9 100644 --- 
a/src/routes/activityReports/index.js +++ b/src/routes/activityReports/index.js @@ -9,6 +9,7 @@ import { getReports, getReportAlerts, getActivityRecipients, + getActivityRecipientsReportId, getGoals, reviewReport, resetToDraft, @@ -40,6 +41,7 @@ router.post('/', transactionWrapper(createReport)); router.get('/approvers', transactionWrapper(getApprovers)); router.get('/groups', transactionWrapper(getGroups)); router.get('/activity-recipients', transactionWrapper(getActivityRecipients)); +router.get('/activity-recipients/:reportId', transactionWrapper(getActivityRecipientsReportId)); router.get('/goals', transactionWrapper(getGoals)); router.post('/goals', transactionWrapper(createGoalsForReport)); router.post('/objectives', transactionWrapper(saveOtherEntityObjectivesForReport)); diff --git a/src/services/activityReports.js b/src/services/activityReports.js index 1e2dcfe398..3edd6432ee 100644 --- a/src/services/activityReports.js +++ b/src/services/activityReports.js @@ -14,6 +14,7 @@ import { ActivityReportCollaborator, ActivityReportFile, sequelize, + Sequelize, ActivityRecipient, File, Grant, @@ -1125,33 +1126,54 @@ export async function setStatus(report, status) { * @param {number} [regionId] - A region id to query against * @returns {*} Grants and Other entities */ -export async function possibleRecipients(regionId) { - const where = { status: 'Active', regionId }; - +export async function possibleRecipients(regionId, activityReportId = null) { const grants = await Recipient.findAll({ - attributes: ['id', 'name'], + attributes: [ + 'id', + 'name', + ], order: ['name'], - include: [{ - where, - model: Grant, - as: 'grants', - attributes: [['id', 'activityRecipientId'], 'name', 'number'], - include: [{ - model: Recipient, - as: 'recipient', - }, + include: [ { - model: Program, - as: 'programs', - attributes: ['programType'], + model: Grant, + as: 'grants', + attributes: ['number', ['id', 'activityRecipientId'], 'name'], + required: true, + include: [ + { + model: Program, + as: 'programs', + attributes: ['programType'], + required: true, + }, + { + model: Recipient, + as: 'recipient', + required: true, + }, + { + model: ActivityRecipient, + as: 'activityRecipients', + attributes: [], + required: false, + }, + ], }, + ], + where: { + '$grants.regionId$': regionId, + [Op.or]: [ + { '$grants.status$': 'Active' }, + { '$grants->activityRecipients.activityReportId$': activityReportId }, ], - }], + }, }); + const otherEntities = await OtherEntity.findAll({ raw: true, attributes: [['id', 'activityRecipientId'], 'name'], }); + return { grants, otherEntities }; } From e8832d1b950171067d2d2a16f8fed74191494492 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Thu, 11 Jul 2024 10:11:58 -0700 Subject: [PATCH 39/60] updates per codereview --- src/lib/apiErrorHandler.js | 10 ++++++++-- src/lib/apiErrorHandler.test.js | 11 +++++++++++ src/routes/transactionWrapper.js | 3 +-- src/workers/transactionWrapper.ts | 5 ++--- 4 files changed, 22 insertions(+), 7 deletions(-) diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 9f6b038151..3b3ecd8d69 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -77,7 +77,10 @@ export const handleError = async (req, res, error, logContext) => { if (error instanceof Sequelize.ConnectionError || error instanceof Sequelize.ConnectionAcquireTimeoutError) { - logger.error(`${logContext.namespace} Connection Pool: ${JSON.stringify(sequelize?.connectionManager?.pool)}`); + const pool = sequelize?.connectionManager?.pool; + const 
usedConnections = pool ? pool.used.length : null; + const waitingConnections = pool ? pool.pending.length : null; + logger.error(`${logContext.namespace} Connection Pool: Used Connections - ${usedConnections}, Waiting Connections - ${waitingConnections}`); } const requestErrorId = await logRequestError(req, operation, error, logContext); @@ -186,7 +189,10 @@ export const handleWorkerError = async (job, error, logContext) => { if (error instanceof Sequelize.ConnectionError || error instanceof Sequelize.ConnectionAcquireTimeoutError) { - logger.error(`${logContext.namespace} Connection Pool: ${JSON.stringify(sequelize.connectionManager.pool)}`); + const pool = sequelize?.connectionManager?.pool; + const usedConnections = pool ? pool.used.length : null; + const waitingConnections = pool ? pool.pending.length : null; + logger.error(`${logContext.namespace} Connection Pool: Used Connections - ${usedConnections}, Waiting Connections - ${waitingConnections}`); } const requestErrorId = await logWorkerError(job, operation, error, logContext); diff --git a/src/lib/apiErrorHandler.test.js b/src/lib/apiErrorHandler.test.js index 75fdc50306..fca179fedd 100644 --- a/src/lib/apiErrorHandler.test.js +++ b/src/lib/apiErrorHandler.test.js @@ -2,6 +2,7 @@ import Sequelize from 'sequelize'; import { INTERNAL_SERVER_ERROR } from 'http-codes'; import db, { RequestErrors } from '../models'; import handleErrors, { handleUnexpectedErrorInCatchBlock, handleWorkerErrors, handleUnexpectedWorkerError } from './apiErrorHandler'; +import { auditLogger as logger } from '../logger'; const mockUser = { id: 47, @@ -42,10 +43,18 @@ const mockLogContext = { namespace: 'TEST', }; +jest.mock('../logger', () => ({ + auditLogger: { + error: jest.fn(), + }, +})); + describe('apiErrorHandler', () => { beforeEach(async () => { await RequestErrors.destroy({ where: {} }); + jest.clearAllMocks(); }); + afterAll(async () => { await RequestErrors.destroy({ where: {} }); await db.sequelize.close(); @@ -104,6 +113,7 @@ describe('apiErrorHandler', () => { await handleErrors(mockRequest, mockResponse, mockConnectionError, mockLogContext); expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('Connection Pool: Used Connections')); const requestErrors = await RequestErrors.findAll(); @@ -171,6 +181,7 @@ describe('apiErrorHandler', () => { ); expect(mockResponse.status).toHaveBeenCalledWith(INTERNAL_SERVER_ERROR); + expect(logger.error).toHaveBeenCalledWith(expect.stringContaining('Connection Pool: Used Connections')); const requestErrors = await RequestErrors.findAll(); diff --git a/src/routes/transactionWrapper.js b/src/routes/transactionWrapper.js index e0a79379c6..50afd9cfe6 100644 --- a/src/routes/transactionWrapper.js +++ b/src/routes/transactionWrapper.js @@ -17,10 +17,9 @@ export default function transactionWrapper(originalFunction, context = '') { // eslint-disable-next-line @typescript-eslint/return-await return await sequelize.transaction(async (transaction) => { httpContext.set('transactionId', transaction.id); - let result; try { await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); - result = await originalFunction(req, res, next); + const result = await originalFunction(req, res, next); const duration = Date.now() - startTime; auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); removeFromAuditedTransactions(); diff --git a/src/workers/transactionWrapper.ts 
b/src/workers/transactionWrapper.ts index 45cc2512f5..ab834217dd 100644 --- a/src/workers/transactionWrapper.ts +++ b/src/workers/transactionWrapper.ts @@ -22,17 +22,16 @@ const transactionQueueWrapper = ( return httpContext.ns.runPromise(async () => { httpContext.set('loggedUser', job.referenceData.userId); httpContext.set('impersonationUserId', job.referenceData.impersonationUserId); - httpContext.set('sessionSig', job.id); // TODO: what value should be used here + httpContext.set('sessionSig', job.id); httpContext.set('auditDescriptor', originalFunction.name); try { // eslint-disable-next-line @typescript-eslint/return-await return await sequelize.transaction(async (transaction) => { httpContext.set('transactionId', transaction.id); - let result; try { // eslint-disable-next-line await addAuditTransactionSettings(sequelize, null, null, 'transaction', originalFunction.name); - result = await originalFunction(job); + const result = await originalFunction(job); const duration = Date.now() - startTime; auditLogger.info(`${originalFunction.name} ${context} execution time: ${duration}ms`); removeFromAuditedTransactions(); From 5b4dfbf58f1104bd1a1311fe55ad2e042067477a Mon Sep 17 00:00:00 2001 From: nvms Date: Thu, 11 Jul 2024 15:27:49 -0400 Subject: [PATCH 40/60] different call when new vs. not-new --- frontend/src/fetchers/activityReports.js | 2 +- frontend/src/pages/ActivityReport/index.js | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js index 392899455b..c8445237d3 100644 --- a/frontend/src/fetchers/activityReports.js +++ b/frontend/src/fetchers/activityReports.js @@ -102,7 +102,7 @@ export const getRecipients = async (region) => { return recipients.json(); }; -export const getRecipientsForAR = async (region, reportId) => { +export const getRecipientsForExistingAR = async (region, reportId) => { const recipients = await get(join(activityReportUrl, 'activity-recipients/', reportId, '/', `?region=${region}`)); return recipients.json(); }; diff --git a/frontend/src/pages/ActivityReport/index.js b/frontend/src/pages/ActivityReport/index.js index efac0b8777..3d18d855b3 100644 --- a/frontend/src/pages/ActivityReport/index.js +++ b/frontend/src/pages/ActivityReport/index.js @@ -34,13 +34,14 @@ import { submitReport, saveReport, getReport, - getRecipientsForAR, + getRecipientsForExistingAR, createReport, getCollaborators, getApprovers, reviewReport, resetToDraft, getGroupsForActivityReport, + getRecipients, } from '../../fetchers/activityReports'; import useLocalStorage, { setConnectionActiveWithError } from '../../hooks/useLocalStorage'; import NetworkContext, { isOnlineMode } from '../../NetworkContext'; @@ -277,8 +278,17 @@ function ActivityReport({ }; } + const getRecips = async () => { + if (reportId.current && reportId.current !== 'new') { + return getRecipientsForExistingAR(report.regionId, reportId.current); + } + + return getRecipients(report.regionId); + }; + const apiCalls = [ - getRecipientsForAR(report.regionId, reportId.current), + getRecips(), + // getRecipientsForAR(report.regionId, reportId.current), getCollaborators(report.regionId), getApprovers(report.regionId), getGroupsForActivityReport(report.regionId), From aab53502ec6b2146f25da7f38fb8008c9ef34845 Mon Sep 17 00:00:00 2001 From: nvms Date: Thu, 11 Jul 2024 16:10:15 -0400 Subject: [PATCH 41/60] fix AR tests --- frontend/src/fetchers/activityReports.js | 3 ++- frontend/src/pages/ActivityReport/__tests__/index.js 
| 4 ++++ frontend/src/pages/ActivityReport/index.js | 1 - src/routes/activityReports/handlers.js | 2 +- src/routes/activityReports/index.js | 4 ++-- 5 files changed, 9 insertions(+), 5 deletions(-) diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js index c8445237d3..fa859fe8aa 100644 --- a/frontend/src/fetchers/activityReports.js +++ b/frontend/src/fetchers/activityReports.js @@ -103,7 +103,8 @@ export const getRecipients = async (region) => { }; export const getRecipientsForExistingAR = async (region, reportId) => { - const recipients = await get(join(activityReportUrl, 'activity-recipients/', reportId, '/', `?region=${region}`)); + const url = join(activityReportUrl, 'activity-recipients/', `${reportId}`, '/', `?region=${region}`); + const recipients = await get(url); return recipients.json(); }; diff --git a/frontend/src/pages/ActivityReport/__tests__/index.js b/frontend/src/pages/ActivityReport/__tests__/index.js index 017b0ca7f4..9a2d88f0ea 100644 --- a/frontend/src/pages/ActivityReport/__tests__/index.js +++ b/frontend/src/pages/ActivityReport/__tests__/index.js @@ -42,6 +42,7 @@ describe('ActivityReport', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', recipients); fetchMock.get('/api/activity-reports/groups?region=1', [{ id: 110, name: 'Group 1', @@ -155,6 +156,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -226,6 +228,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -363,6 +366,7 @@ describe('ActivityReport', () => { describe('resetToDraft', () => { it('navigates to the correct page', async () => { + fetchMock.get('/api/activity-reports/activity-recipients/3/?region=1', recipients); const data = formData(); // load the report fetchMock.get('/api/activity-reports/3', { diff --git a/frontend/src/pages/ActivityReport/index.js b/frontend/src/pages/ActivityReport/index.js index 3d18d855b3..7dda972b68 100644 --- a/frontend/src/pages/ActivityReport/index.js +++ b/frontend/src/pages/ActivityReport/index.js @@ -288,7 +288,6 @@ function ActivityReport({ const apiCalls = [ getRecips(), - // getRecipientsForAR(report.regionId, reportId.current), getCollaborators(report.regionId), getApprovers(report.regionId), getGroupsForActivityReport(report.regionId), diff --git a/src/routes/activityReports/handlers.js b/src/routes/activityReports/handlers.js index e2aeba2640..bc3a94f5d0 100644 --- a/src/routes/activityReports/handlers.js +++ b/src/routes/activityReports/handlers.js @@ -722,7 +722,7 @@ export async function getActivityRecipients(req, res) { res.json(activityRecipients); } -export async function getActivityRecipientsReportId(req, res) { +export async function getActivityRecipientsForExistingReport(req, res) { const { region } = req.query; const { reportId } = 
req.params; const targetRegion = parseInt(region, DECIMAL_BASE); diff --git a/src/routes/activityReports/index.js b/src/routes/activityReports/index.js index 93f74a0ba9..a1fb297327 100644 --- a/src/routes/activityReports/index.js +++ b/src/routes/activityReports/index.js @@ -9,7 +9,7 @@ import { getReports, getReportAlerts, getActivityRecipients, - getActivityRecipientsReportId, + getActivityRecipientsForExistingReport, getGoals, reviewReport, resetToDraft, @@ -41,7 +41,7 @@ router.post('/', transactionWrapper(createReport)); router.get('/approvers', transactionWrapper(getApprovers)); router.get('/groups', transactionWrapper(getGroups)); router.get('/activity-recipients', transactionWrapper(getActivityRecipients)); -router.get('/activity-recipients/:reportId', transactionWrapper(getActivityRecipientsReportId)); +router.get('/activity-recipients/:reportId', transactionWrapper(getActivityRecipientsForExistingReport)); router.get('/goals', transactionWrapper(getGoals)); router.post('/goals', transactionWrapper(createGoalsForReport)); router.post('/objectives', transactionWrapper(saveOtherEntityObjectivesForReport)); From 2c10411678d789e870e8ac3ac66039bac28075e6 Mon Sep 17 00:00:00 2001 From: nvms Date: Thu, 11 Jul 2024 16:11:12 -0400 Subject: [PATCH 42/60] fix localStorage tests --- frontend/src/pages/ActivityReport/__tests__/localStorage.js | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/src/pages/ActivityReport/__tests__/localStorage.js b/frontend/src/pages/ActivityReport/__tests__/localStorage.js index 206e22e88d..07bb018c43 100644 --- a/frontend/src/pages/ActivityReport/__tests__/localStorage.js +++ b/frontend/src/pages/ActivityReport/__tests__/localStorage.js @@ -37,6 +37,7 @@ describe('Local storage fallbacks', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', recipients); fetchMock.get('/api/activity-reports/groups?region=1', []); fetchMock.get('/api/users/collaborators?region=1', []); fetchMock.get('/api/activity-reports/approvers?region=1', []); From 8c2a5e05d593c865b12871fda57e7f1be94fb9ee Mon Sep 17 00:00:00 2001 From: nvms Date: Thu, 11 Jul 2024 16:51:47 -0400 Subject: [PATCH 43/60] don't force programType --- src/services/activityReports.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/services/activityReports.js b/src/services/activityReports.js index 3edd6432ee..6d35b1365b 100644 --- a/src/services/activityReports.js +++ b/src/services/activityReports.js @@ -1144,7 +1144,7 @@ export async function possibleRecipients(regionId, activityReportId = null) { model: Program, as: 'programs', attributes: ['programType'], - required: true, + required: false, }, { model: Recipient, From fd1a662526b188d31493b9415d4e1594612d0ab9 Mon Sep 17 00:00:00 2001 From: nvms Date: Fri, 12 Jul 2024 14:10:41 -0400 Subject: [PATCH 44/60] endpoint path change --- frontend/src/fetchers/activityReports.js | 2 +- frontend/src/pages/ActivityReport/__tests__/index.js | 8 ++++---- .../src/pages/ActivityReport/__tests__/localStorage.js | 2 +- src/routes/activityReports/handlers.js | 4 ++-- src/routes/activityReports/index.js | 1 + 5 files changed, 9 insertions(+), 8 deletions(-) diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js index fa859fe8aa..7340017431 100644 --- a/frontend/src/fetchers/activityReports.js +++ b/frontend/src/fetchers/activityReports.js @@ -103,7 +103,7 @@ export const getRecipients = 
async (region) => { }; export const getRecipientsForExistingAR = async (region, reportId) => { - const url = join(activityReportUrl, 'activity-recipients/', `${reportId}`, '/', `?region=${region}`); + const url = join(activityReportUrl, `${reportId}`, 'activity-recipients', `?region=${region}`); const recipients = await get(url); return recipients.json(); }; diff --git a/frontend/src/pages/ActivityReport/__tests__/index.js b/frontend/src/pages/ActivityReport/__tests__/index.js index 9a2d88f0ea..0dad5cda20 100644 --- a/frontend/src/pages/ActivityReport/__tests__/index.js +++ b/frontend/src/pages/ActivityReport/__tests__/index.js @@ -42,7 +42,7 @@ describe('ActivityReport', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); - fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', recipients); fetchMock.get('/api/activity-reports/groups?region=1', [{ id: 110, name: 'Group 1', @@ -156,7 +156,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); - fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -228,7 +228,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); - fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -366,7 +366,7 @@ describe('ActivityReport', () => { describe('resetToDraft', () => { it('navigates to the correct page', async () => { - fetchMock.get('/api/activity-reports/activity-recipients/3/?region=1', recipients); + fetchMock.get('/api/activity-reports/3/activity-recipients/?region=1', recipients); const data = formData(); // load the report fetchMock.get('/api/activity-reports/3', { diff --git a/frontend/src/pages/ActivityReport/__tests__/localStorage.js b/frontend/src/pages/ActivityReport/__tests__/localStorage.js index 07bb018c43..7ed0e8eb55 100644 --- a/frontend/src/pages/ActivityReport/__tests__/localStorage.js +++ b/frontend/src/pages/ActivityReport/__tests__/localStorage.js @@ -37,7 +37,7 @@ describe('Local storage fallbacks', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); - fetchMock.get('/api/activity-reports/activity-recipients/1/?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', recipients); fetchMock.get('/api/activity-reports/groups?region=1', []); fetchMock.get('/api/users/collaborators?region=1', []); fetchMock.get('/api/activity-reports/approvers?region=1', []); diff --git a/src/routes/activityReports/handlers.js b/src/routes/activityReports/handlers.js index bc3a94f5d0..8ace5b3869 100644 --- a/src/routes/activityReports/handlers.js +++ b/src/routes/activityReports/handlers.js @@ -724,9 +724,9 @@ export async function 
getActivityRecipients(req, res) { export async function getActivityRecipientsForExistingReport(req, res) { const { region } = req.query; - const { reportId } = req.params; + const { activityReportId } = req.params; const targetRegion = parseInt(region, DECIMAL_BASE); - const activityRecipients = await possibleRecipients(targetRegion, reportId); + const activityRecipients = await possibleRecipients(targetRegion, activityReportId); res.json(activityRecipients); } diff --git a/src/routes/activityReports/index.js b/src/routes/activityReports/index.js index a1fb297327..24329cbc91 100644 --- a/src/routes/activityReports/index.js +++ b/src/routes/activityReports/index.js @@ -62,5 +62,6 @@ router.put('/:activityReportId/review', checkActivityReportIdParam, transactionW router.put('/:activityReportId/submit', checkActivityReportIdParam, transactionWrapper(submitReport)); router.put('/:activityReportId/unlock', checkActivityReportIdParam, transactionWrapper(unlockReport)); router.put('/:activityReportId/goals/edit', checkActivityReportIdParam, transactionWrapper(setGoalAsActivelyEdited)); +router.get('/:activityReportId/activity-recipients', transactionWrapper(getActivityRecipientsForExistingReport)); export default router; From 5e482c941ec4b683cb06b3a6c81ee005d45a4e26 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Fri, 12 Jul 2024 11:18:31 -0700 Subject: [PATCH 45/60] Update handlers.test.js --- src/routes/ssdi/handlers.test.js | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/ssdi/handlers.test.js index cd3429d1fd..6efce9fecd 100644 --- a/src/routes/ssdi/handlers.test.js +++ b/src/routes/ssdi/handlers.test.js @@ -190,23 +190,33 @@ describe('API Endpoints', () => { }); it('should filter out non-integer regionIds', async () => { + const filterRegionsMock = jest.fn((val) => val); + Generic.mockImplementation(() => ({ + filterRegions: filterRegionsMock, + getAllAccessibleRegions: jest.fn(() => []), + })); const response = await request(app) .post('/runQuery') .query({ path: 'src/queries/test/path' }) .send({ regionIds: [1, 'a', 2, 'b', 3] }); expect(response.status).toBe(200); - expect(Generic().filterRegions).toHaveBeenCalledWith([1, 2, 3]); + expect(filterRegionsMock).toHaveBeenCalledWith([1, 2, 3]); }); it('should filter regionIds using policy', async () => { + const filterRegionsMock = jest.fn((val) => val); + Generic.mockImplementation(() => ({ + filterRegions: filterRegionsMock, + getAllAccessibleRegions: jest.fn(() => []), + })); const response = await request(app) .post('/runQuery') .query({ path: 'src/queries/test/path' }) .send({ regionIds: [1, 2, 3, 4] }); expect(response.status).toBe(200); - expect(Generic().filterRegions).toHaveBeenCalledWith([1, 2, 3, 4]); + expect(filterRegionsMock).toHaveBeenCalledWith([1, 2, 3, 4]); expect(response.body).toEqual([{ id: 1, name: 'Test' }]); }); }); From 95772c7653037fc91d96af6a75bda0e6011a3f5d Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Fri, 12 Jul 2024 11:27:05 -0700 Subject: [PATCH 46/60] Update ssdi.test.js --- src/services/ssdi.test.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/services/ssdi.test.js b/src/services/ssdi.test.js index 8f3ef4d036..a1c0cfea7b 100644 --- a/src/services/ssdi.test.js +++ b/src/services/ssdi.test.js @@ -28,6 +28,7 @@ jest.mock('../models', () => ({ beforeEach(() => { queryFileCache.clear(); queryDataCache.clear(); + jest.clearAllMocks(); }); describe('ssdi', () => { @@ -105,7 +106,7 @@ describe('ssdi', () => 
{ expect(result).toEqual([cachedQueryFile]); // Ensure fs.readFileSync is not called since data is from cache - expect(fs.readFileSync).not.toHaveBeenCalled(); + expect(fs.readFileSync).not.toHaveBeenCalledWith('test/directory/file1.sql', 'utf8'); }); }); From b0a4dd1611ac72517d0e76f62a1bbcae48d8ae32 Mon Sep 17 00:00:00 2001 From: Nathan Powell Date: Fri, 12 Jul 2024 12:12:18 -0700 Subject: [PATCH 47/60] populate regionId on old session report --- .../20240715000000-fix-old-session-regions.js | 44 +++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 src/migrations/20240715000000-fix-old-session-regions.js diff --git a/src/migrations/20240715000000-fix-old-session-regions.js b/src/migrations/20240715000000-fix-old-session-regions.js new file mode 100644 index 0000000000..6cda13b575 --- /dev/null +++ b/src/migrations/20240715000000-fix-old-session-regions.js @@ -0,0 +1,44 @@ +const { + prepMigration, +} = require('../lib/migration'); + +module.exports = { + up: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + await queryInterface.sequelize.query(/* sql */` + + -- One very old session lacks the regionId value + -- This finds and sets it + CREATE TEMP TABLE sr_updates + AS + WITH updater AS ( + UPDATE "SessionReportPilots" srp + SET data = JSONB_SET(srp.data,'{regionId}',TO_JSONB(erp."regionId")) + FROM "EventReportPilots" erp + WHERE erp.id = srp."eventId" + AND srp.data->>'regionId' = '' + RETURNING + srp.id srpid, + erp."regionId" + ) SELECT * FROM updater + ; + + SELECT * FROM sr_updates; + -- Looks like: + ---------------------- + -- srpid | regionId + -- -------+---------- + -- 2 | 3 + `, { transaction }); + }, + ), + + down: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + // If we end up needing to revert this, it would be easier to use a separate + // migration using the txid (or a similar identifier) after it's already set + }, + ), +}; From e5febbbb813f02c71c7c7e4ed33bea589ce5c6e8 Mon Sep 17 00:00:00 2001 From: Nathan Powell Date: Fri, 12 Jul 2024 13:01:35 -0700 Subject: [PATCH 48/60] clean up temp tables for CI --- src/migrations/20240715000000-fix-old-session-regions.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/migrations/20240715000000-fix-old-session-regions.js b/src/migrations/20240715000000-fix-old-session-regions.js index 6cda13b575..1806817bdf 100644 --- a/src/migrations/20240715000000-fix-old-session-regions.js +++ b/src/migrations/20240715000000-fix-old-session-regions.js @@ -10,6 +10,7 @@ module.exports = { -- One very old session lacks the regionId value -- This finds and sets it + DROP TABLE IF EXISTS sr_updates; CREATE TEMP TABLE sr_updates AS WITH updater AS ( From a6139fb0897ed846ec8f7da2a162360dd6284913 Mon Sep 17 00:00:00 2001 From: Nathan Powell Date: Fri, 12 Jul 2024 13:19:09 -0700 Subject: [PATCH 49/60] pacify linter by removing tab --- src/migrations/20240715000000-fix-old-session-regions.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/migrations/20240715000000-fix-old-session-regions.js b/src/migrations/20240715000000-fix-old-session-regions.js index 1806817bdf..16b9e24352 100644 --- a/src/migrations/20240715000000-fix-old-session-regions.js +++ b/src/migrations/20240715000000-fix-old-session-regions.js @@ -10,7 +10,7 @@ module.exports = { -- One very old session lacks the regionId value -- 
This finds and sets it - DROP TABLE IF EXISTS sr_updates; + DROP TABLE IF EXISTS sr_updates; CREATE TEMP TABLE sr_updates AS WITH updater AS ( From b1e611318e9e841dba1c49512d5ee4fe88b543e9 Mon Sep 17 00:00:00 2001 From: GarrettEHill Date: Fri, 12 Jul 2024 15:43:41 -0700 Subject: [PATCH 50/60] Update apiErrorHandler.js --- src/lib/apiErrorHandler.js | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/lib/apiErrorHandler.js b/src/lib/apiErrorHandler.js index 3b3ecd8d69..0568aafb5a 100644 --- a/src/lib/apiErrorHandler.js +++ b/src/lib/apiErrorHandler.js @@ -25,7 +25,7 @@ async function logRequestError(req, operation, error, logContext) { } try { - const responseBody = typeof error === 'object' && error !== null + const responseBody = typeof error === 'object' ? { ...error, errorStack: error?.stack } : error; @@ -35,6 +35,7 @@ async function logRequestError(req, operation, error, logContext) { ...(req.query && typeof req.query === 'object' && Object.keys(req.query).length > 0 && { query: req.query }), }; + // Create a request error in the database and get its ID const requestErrorId = await createRequestError({ operation, uri: req.originalUrl, @@ -138,9 +139,12 @@ const logWorkerError = async (job, operation, error, logContext) => { ) { return 0; } + if (!error) { + return 0; + } try { - const responseBody = typeof error === 'object' && error !== null + const responseBody = typeof error === 'object' ? { ...error, errorStack: error?.stack } : error; From 0ef613c0cf1a87cf4e4eaf5b908d5caf917bde88 Mon Sep 17 00:00:00 2001 From: nvms Date: Mon, 15 Jul 2024 08:24:05 -0400 Subject: [PATCH 51/60] update test --- frontend/src/pages/ActivityReport/__tests__/index.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/src/pages/ActivityReport/__tests__/index.js b/frontend/src/pages/ActivityReport/__tests__/index.js index 0dad5cda20..1164d5b731 100644 --- a/frontend/src/pages/ActivityReport/__tests__/index.js +++ b/frontend/src/pages/ActivityReport/__tests__/index.js @@ -42,7 +42,7 @@ describe('ActivityReport', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); - fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', recipients); fetchMock.get('/api/activity-reports/groups?region=1', [{ id: 110, name: 'Group 1', @@ -156,7 +156,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); - fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -228,7 +228,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); - fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -366,7 +366,7 @@ describe('ActivityReport', () => { describe('resetToDraft', () 
=> { it('navigates to the correct page', async () => { - fetchMock.get('/api/activity-reports/3/activity-recipients/?region=1', recipients); + fetchMock.get('/api/activity-reports/3/activity-recipients?region=1', recipients); const data = formData(); // load the report fetchMock.get('/api/activity-reports/3', { From 440a55ac7d9f6b35f69259f9e492b63eabe34f3b Mon Sep 17 00:00:00 2001 From: nvms Date: Mon, 15 Jul 2024 08:30:57 -0400 Subject: [PATCH 52/60] put behind read permissions --- src/routes/activityReports/handlers.js | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/routes/activityReports/handlers.js b/src/routes/activityReports/handlers.js index 8ace5b3869..94048d25d9 100644 --- a/src/routes/activityReports/handlers.js +++ b/src/routes/activityReports/handlers.js @@ -725,6 +725,17 @@ export async function getActivityRecipients(req, res) { export async function getActivityRecipientsForExistingReport(req, res) { const { region } = req.query; const { activityReportId } = req.params; + + const [report] = await activityReportAndRecipientsById(req.params.activityReportId); + const userId = await currentUserId(req, res); + const user = await userById(userId); + const authorization = new ActivityReport(user, report); + + if (!authorization.canGet()) { + res.sendStatus(403); + return; + } + const targetRegion = parseInt(region, DECIMAL_BASE); const activityRecipients = await possibleRecipients(targetRegion, activityReportId); res.json(activityRecipients); From c678a4c38236288ebb435cb3fb57746a51990656 Mon Sep 17 00:00:00 2001 From: nvms Date: Mon, 15 Jul 2024 08:43:52 -0400 Subject: [PATCH 53/60] update test --- frontend/src/pages/ActivityReport/__tests__/localStorage.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/pages/ActivityReport/__tests__/localStorage.js b/frontend/src/pages/ActivityReport/__tests__/localStorage.js index 7ed0e8eb55..3ab0cc4303 100644 --- a/frontend/src/pages/ActivityReport/__tests__/localStorage.js +++ b/frontend/src/pages/ActivityReport/__tests__/localStorage.js @@ -37,7 +37,7 @@ describe('Local storage fallbacks', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); - fetchMock.get('/api/activity-reports/1/activity-recipients/?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', recipients); fetchMock.get('/api/activity-reports/groups?region=1', []); fetchMock.get('/api/users/collaborators?region=1', []); fetchMock.get('/api/activity-reports/approvers?region=1', []); From 3a7937465d8c94f5f1d7ca30e0aa2418ac1c2dbc Mon Sep 17 00:00:00 2001 From: nvms Date: Mon, 15 Jul 2024 09:07:41 -0400 Subject: [PATCH 54/60] get region id from the report --- frontend/src/fetchers/activityReports.js | 2 +- frontend/src/pages/ActivityReport/__tests__/index.js | 8 ++++---- .../src/pages/ActivityReport/__tests__/localStorage.js | 2 +- src/routes/activityReports/handlers.js | 5 ++--- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js index 7340017431..4cb30d04ea 100644 --- a/frontend/src/fetchers/activityReports.js +++ b/frontend/src/fetchers/activityReports.js @@ -103,7 +103,7 @@ export const getRecipients = async (region) => { }; export const getRecipientsForExistingAR = async (region, reportId) => { - const url = join(activityReportUrl, `${reportId}`, 'activity-recipients', `?region=${region}`); + const url = join(activityReportUrl, `${reportId}`, 
'activity-recipients'); const recipients = await get(url); return recipients.json(); }; diff --git a/frontend/src/pages/ActivityReport/__tests__/index.js b/frontend/src/pages/ActivityReport/__tests__/index.js index 1164d5b731..79a2da463d 100644 --- a/frontend/src/pages/ActivityReport/__tests__/index.js +++ b/frontend/src/pages/ActivityReport/__tests__/index.js @@ -42,7 +42,7 @@ describe('ActivityReport', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); - fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients', recipients); fetchMock.get('/api/activity-reports/groups?region=1', [{ id: 110, name: 'Group 1', @@ -156,7 +156,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); - fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -228,7 +228,7 @@ describe('ActivityReport', () => { }; fetchMock.get('/api/activity-reports/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); - fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', groupRecipients, { overwriteRoutes: true }); + fetchMock.get('/api/activity-reports/1/activity-recipients', groupRecipients, { overwriteRoutes: true }); const data = formData(); fetchMock.get('/api/activity-reports/1', { ...data, activityRecipients: [] }); @@ -366,7 +366,7 @@ describe('ActivityReport', () => { describe('resetToDraft', () => { it('navigates to the correct page', async () => { - fetchMock.get('/api/activity-reports/3/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/3/activity-recipients', recipients); const data = formData(); // load the report fetchMock.get('/api/activity-reports/3', { diff --git a/frontend/src/pages/ActivityReport/__tests__/localStorage.js b/frontend/src/pages/ActivityReport/__tests__/localStorage.js index 3ab0cc4303..dac60ae551 100644 --- a/frontend/src/pages/ActivityReport/__tests__/localStorage.js +++ b/frontend/src/pages/ActivityReport/__tests__/localStorage.js @@ -37,7 +37,7 @@ describe('Local storage fallbacks', () => { beforeEach(() => { fetchMock.get('/api/activity-reports/activity-recipients?region=1', recipients); - fetchMock.get('/api/activity-reports/1/activity-recipients?region=1', recipients); + fetchMock.get('/api/activity-reports/1/activity-recipients', recipients); fetchMock.get('/api/activity-reports/groups?region=1', []); fetchMock.get('/api/users/collaborators?region=1', []); fetchMock.get('/api/activity-reports/approvers?region=1', []); diff --git a/src/routes/activityReports/handlers.js b/src/routes/activityReports/handlers.js index 94048d25d9..7b6189879d 100644 --- a/src/routes/activityReports/handlers.js +++ b/src/routes/activityReports/handlers.js @@ -723,10 +723,9 @@ export async function getActivityRecipients(req, res) { } export async function getActivityRecipientsForExistingReport(req, res) { - const { region } = req.query; const { activityReportId } = req.params; - const [report] = await activityReportAndRecipientsById(req.params.activityReportId); + const [report] = await 
activityReportAndRecipientsById(activityReportId);
   const userId = await currentUserId(req, res);
   const user = await userById(userId);
   const authorization = new ActivityReport(user, report);
@@ -736,7 +735,7 @@ export async function getActivityRecipientsForExistingReport(req, res) {
     return;
   }
 
-  const targetRegion = parseInt(region, DECIMAL_BASE);
+  const targetRegion = parseInt(report.regionId, DECIMAL_BASE);
   const activityRecipients = await possibleRecipients(targetRegion, activityReportId);
   res.json(activityRecipients);
 }

From 7cd85f703eefdf68f8e00850bc8f789b0ff14ba7 Mon Sep 17 00:00:00 2001
From: nvms
Date: Mon, 15 Jul 2024 09:11:47 -0400
Subject: [PATCH 55/60] dont pass region id

---
 frontend/src/fetchers/activityReports.js   | 2 +-
 frontend/src/pages/ActivityReport/index.js | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/frontend/src/fetchers/activityReports.js b/frontend/src/fetchers/activityReports.js
index 4cb30d04ea..6f98a9dcd8 100644
--- a/frontend/src/fetchers/activityReports.js
+++ b/frontend/src/fetchers/activityReports.js
@@ -102,7 +102,7 @@ export const getRecipients = async (region) => {
   return recipients.json();
 };
 
-export const getRecipientsForExistingAR = async (region, reportId) => {
+export const getRecipientsForExistingAR = async (reportId) => {
   const url = join(activityReportUrl, `${reportId}`, 'activity-recipients');
   const recipients = await get(url);
   return recipients.json();
diff --git a/frontend/src/pages/ActivityReport/index.js b/frontend/src/pages/ActivityReport/index.js
index 7dda972b68..323bb0f5d8 100644
--- a/frontend/src/pages/ActivityReport/index.js
+++ b/frontend/src/pages/ActivityReport/index.js
@@ -280,7 +280,7 @@ function ActivityReport({
   const getRecips = async () => {
     if (reportId.current && reportId.current !== 'new') {
-      return getRecipientsForExistingAR(report.regionId, reportId.current);
+      return getRecipientsForExistingAR(reportId.current);
     }
 
     return getRecipients(report.regionId);

From a0170be926eb4c2d219169d2b89154d2b17c0c61 Mon Sep 17 00:00:00 2001
From: Krys Wisnaskas
Date: Tue, 16 Jul 2024 10:14:41 -0400
Subject: [PATCH 56/60] Remove ObjectiveRoles

---
 src/scopes/goals/index.js |  5 -----
 src/scopes/goals/role.js  | 30 ------------------------------
 2 files changed, 35 deletions(-)
 delete mode 100644 src/scopes/goals/role.js

diff --git a/src/scopes/goals/index.js b/src/scopes/goals/index.js
index aee1ecb0a2..796e43f2c1 100644
--- a/src/scopes/goals/index.js
+++ b/src/scopes/goals/index.js
@@ -6,7 +6,6 @@ import { withReasons, withoutReasons } from './reasons';
 import { withRecipientName, withoutRecipientName } from './recipient';
 import { withRecipientId } from './recipientId';
 import { withRegion, withoutRegion } from './region';
-import { withRoles, withoutRoles } from './role';
 import {
   containsGrantNumber, doesNotContainGrantNumber, withGrantNumber, withoutGrantNumber,
 } from './grantNumber';
@@ -74,10 +73,6 @@ export const topicToQuery = {
     in: (query) => withRegion(query),
     nin: (query) => withoutRegion(query),
   },
-  role: {
-    in: (query) => withRoles(query),
-    nin: (query) => withoutRoles(query),
-  },
   group: {
     in: (query, _options, userId) => withGroup(query, userId),
     nin: (query, _options, userId) => withoutGroup(query, userId),
diff --git a/src/scopes/goals/role.js b/src/scopes/goals/role.js
deleted file mode 100644
index 62ae91d2e1..0000000000
--- a/src/scopes/goals/role.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { Op } from 'sequelize';
-import { filterAssociation } from './utils';
-
-const roleFilter = `
-SELECT
-  DISTINCT "Goal"."id"
-FROM "Objectives" "Objectives"
-INNER JOIN "ObjectiveRoles" "ObjectiveRoles"
-ON "Objectives"."id" = "ObjectiveRoles"."objectiveId"
-INNER JOIN "Roles" "Roles"
-ON "ObjectiveRoles"."roleId" = "Roles"."id"
-INNER JOIN "Goals" "Goal"
-ON "Objectives"."goalId" = "Goal"."id"
-WHERE "Roles"."name"`;
-
-export function withRoles(roles) {
-  return {
-    [Op.or]: [
-      filterAssociation(roleFilter, roles, false),
-    ],
-  };
-}
-
-export function withoutRoles(roles) {
-  return {
-    [Op.and]: [
-      filterAssociation(roleFilter, roles, true),
-    ],
-  };
-}

From fac45de3773fd257a80857e56443095c9dbd698d Mon Sep 17 00:00:00 2001
From: GarrettEHill
Date: Tue, 16 Jul 2024 10:17:03 -0700
Subject: [PATCH 57/60] relocate

---
 src/routes/admin/index.js                    | 2 ++
 src/routes/{ => admin}/ssdi/handlers.test.js | 0
 src/routes/{ => admin}/ssdi/handlers.ts      | 0
 src/routes/{ => admin}/ssdi/index.ts         | 0
 src/routes/apiDirectory.js                   | 2 --
 5 files changed, 2 insertions(+), 2 deletions(-)
 rename src/routes/{ => admin}/ssdi/handlers.test.js (100%)
 rename src/routes/{ => admin}/ssdi/handlers.ts (100%)
 rename src/routes/{ => admin}/ssdi/index.ts (100%)

diff --git a/src/routes/admin/index.js b/src/routes/admin/index.js
index 140dc96074..b62021cd3a 100644
--- a/src/routes/admin/index.js
+++ b/src/routes/admin/index.js
@@ -13,6 +13,7 @@ import ssRouter from './ss';
 import trainingReportRouter from './trainingReport';
 import legacyReportRouter from './legacyReports';
 import courseRouter from './course';
+import ssdiRouter from './ssdi';
 import userAdminAccessMiddleware from '../../middleware/userAdminAccessMiddleware';
 import transactionWrapper from '../transactionWrapper';
 
@@ -35,5 +36,6 @@ router.use('/training-reports', trainingReportRouter);
 router.use('/legacy-reports', legacyReportRouter);
 router.use('/courses', courseRouter);
 router.use('/ss', ssRouter);
+router.use('/ssdi', ssdiRouter);
 
 export default router;
diff --git a/src/routes/ssdi/handlers.test.js b/src/routes/admin/ssdi/handlers.test.js
similarity index 100%
rename from src/routes/ssdi/handlers.test.js
rename to src/routes/admin/ssdi/handlers.test.js
diff --git a/src/routes/ssdi/handlers.ts b/src/routes/admin/ssdi/handlers.ts
similarity index 100%
rename from src/routes/ssdi/handlers.ts
rename to src/routes/admin/ssdi/handlers.ts
diff --git a/src/routes/ssdi/index.ts b/src/routes/admin/ssdi/index.ts
similarity index 100%
rename from src/routes/ssdi/index.ts
rename to src/routes/admin/ssdi/index.ts
diff --git a/src/routes/apiDirectory.js b/src/routes/apiDirectory.js
index c4483241de..3efab5ff4b 100644
--- a/src/routes/apiDirectory.js
+++ b/src/routes/apiDirectory.js
@@ -33,7 +33,6 @@ import monitoringRouter from './monitoring';
 import coursesRouter from './courses';
 import { currentUserId } from '../services/currentUser';
 import objectiveRouter from './objectives';
-import ssdiRouter from './ssdi';
 
 export const loginPath = '/login';
 
@@ -81,7 +80,6 @@ router.use('/national-center', nationalCenterRouter);
 router.use('/communication-logs', communicationLogRouter);
 router.use('/monitoring', monitoringRouter);
 router.use('/courses', coursesRouter);
-router.use('/ssdi', ssdiRouter);
 
 const getUser = async (req, res) => {
   const userId = await currentUserId(req, res);

From 86d95a0fb5e1d31ea5c701a89911b8dd1a60984a Mon Sep 17 00:00:00 2001
From: GarrettEHill
Date: Tue, 16 Jul 2024 10:24:07 -0700
Subject: [PATCH 58/60] fix paths

---
 src/routes/admin/ssdi/handlers.test.js | 8 ++++----
 src/routes/admin/ssdi/handlers.ts      | 8 ++++----
 src/routes/admin/ssdi/index.ts         | 4 ++--
 3 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/src/routes/admin/ssdi/handlers.test.js b/src/routes/admin/ssdi/handlers.test.js
index 6efce9fecd..300d819a20 100644
--- a/src/routes/admin/ssdi/handlers.test.js
+++ b/src/routes/admin/ssdi/handlers.test.js
@@ -8,11 +8,11 @@ import {
   sanitizeFilename,
   generateFlagString,
   executeQuery,
-} from '../../services/ssdi';
-import { currentUserId } from '../../services/currentUser';
-import { userById } from '../../services/users';
+} from '../../../services/ssdi';
+import { currentUserId } from '../../../services/currentUser';
+import { userById } from '../../../services/users';
 import { listQueries, getFlags, runQuery } from './handlers';
-import Generic from '../../policies/generic';
+import Generic from '../../../policies/generic';
diff --git a/src/routes/admin/ssdi/handlers.ts b/src/routes/admin/ssdi/handlers.ts
index 10e4aaefc3..700f7d7f9c 100644
--- a/src/routes/admin/ssdi/handlers.ts
+++ b/src/routes/admin/ssdi/handlers.ts
@@ -1,7 +1,7 @@
 import stringify from 'csv-stringify/lib/sync';
 import { Request, Response } from 'express';
-import { currentUserId } from '../../services/currentUser';
-import { userById } from '../../services/users';
+import { currentUserId } from '../../../services/currentUser';
+import { userById } from '../../../services/users';
 import {
   FlagValues,
   listQueryFiles,
@@ -11,8 +11,8 @@ import {
   sanitizeFilename,
   generateFlagString,
   executeQuery,
-} from '../../services/ssdi';
-import Generic from '../../policies/generic';
+} from '../../../services/ssdi';
+import Generic from '../../../policies/generic';
 
 // list all available query files with name and description
 const listQueries = async (req: Request, res: Response) => {
diff --git a/src/routes/admin/ssdi/index.ts b/src/routes/admin/ssdi/index.ts
index 3e8f3ee347..091e9d84a4 100644
--- a/src/routes/admin/ssdi/index.ts
+++ b/src/routes/admin/ssdi/index.ts
@@ -1,6 +1,6 @@
 import express from 'express';
-import transactionWrapper from '../transactionWrapper';
-import authMiddleware from '../../middleware/authMiddleware';
+import transactionWrapper from '../../transactionWrapper';
+import authMiddleware from '../../../middleware/authMiddleware';
 import {
   listQueries,
   getFlags,

From a9c13d6998a4d36bc0a3f39e4fde263beea9280b Mon Sep 17 00:00:00 2001
From: nvms
Date: Tue, 16 Jul 2024 13:40:57 -0400
Subject: [PATCH 59/60] increase from 3 to 4

---
 deployment_config/prod_vars.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/deployment_config/prod_vars.yml b/deployment_config/prod_vars.yml
index 665e806d06..14d1d96c84 100644
--- a/deployment_config/prod_vars.yml
+++ b/deployment_config/prod_vars.yml
@@ -1,6 +1,6 @@
 env: prod
 web_instances: 3
-web_memory: 3GB
+web_memory: 4GB
 worker_instances: 2
 worker_memory: 1GB
 similarity_api_instances: 1

From 5051243ac33061061da38a49a94bff52ae2007a4 Mon Sep 17 00:00:00 2001
From: GarrettEHill
Date: Tue, 16 Jul 2024 11:13:13 -0700
Subject: [PATCH 60/60] fix paths

---
 src/routes/admin/ssdi/handlers.test.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/routes/admin/ssdi/handlers.test.js b/src/routes/admin/ssdi/handlers.test.js
index 300d819a20..a6bdf4bd68 100644
--- a/src/routes/admin/ssdi/handlers.test.js
+++ b/src/routes/admin/ssdi/handlers.test.js
@@ -14,7 +14,7 @@ import { userById } from '../../../services/users';
 import { listQueries, getFlags, runQuery } from './handlers';
 import Generic from '../../../policies/generic';
 
-jest.mock('../../services/ssdi', () => ({
+jest.mock('../../../services/ssdi', () => ({
   listQueryFiles: jest.fn(),
   readFlagsAndQueryFromFile: jest.fn(),
   validateFlagValues: jest.fn(),
@@ -24,15 +24,15 @@ jest.mock('../../services/ssdi', () => ({
   executeQuery: jest.fn(),
 }));
 
-jest.mock('../../services/currentUser', () => ({
+jest.mock('../../../services/currentUser', () => ({
   currentUserId: jest.fn(),
 }));
 
-jest.mock('../../services/users', () => ({
+jest.mock('../../../services/users', () => ({
   userById: jest.fn(),
 }));
 
-jest.mock('../../policies/generic');
+jest.mock('../../../policies/generic');
 
 const app = express();
 app.use(express.json());