diff --git a/.circleci/config.yml b/.circleci/config.yml index 10d7a19338..95af21f9b3 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,7 +18,7 @@ executors: POSTGRES_DB: ttasmarthub docker-python-executor: docker: - - image: cimg/python:3.9.19 + - image: cimg/python:3.9.20 machine-executor: machine: image: ubuntu-2204:current @@ -560,10 +560,10 @@ parameters: type: string dev_git_branch: # change to feature branch to test deployment description: "Name of github branch that will deploy to dev" - default: "mb/TTAHUB-3484/insert-standard-goals" + default: "kw-unsafe-inline" type: string sandbox_git_branch: # change to feature branch to test deployment - default: "mb/TTAHUB-3478/goal-nudge-version-2" + default: "mb/TTAHUB-3483/checkbox-to-activity-reports" type: string prod_new_relic_app_id: default: "877570491" @@ -580,19 +580,7 @@ parameters: manual-trigger: type: boolean default: false - env_list: - description: "List of environments to manage (start/stop)" - type: string - default: "tta-smarthub-dev,tta-smarthub-sandbox" - space_list: - description: "List of Cloud Foundry spaces corresponding to each environment" - type: string - default: "" - env_state: - description: "State of the environment to change (start, stop, restart, restage)" - type: string - default: "none" - manual-manage-env: + fail-on-modified-lines: type: boolean default: false jobs: @@ -695,6 +683,9 @@ jobs: at: . 
- setup_remote_docker: version: default + - run: + name: Add GitHub to known_hosts + command: ssh-keyscan -H github.com >> ~/.ssh/known_hosts - run: name: Run migrations ci command: yarn db:migrate:ci @@ -711,6 +702,19 @@ jobs: command: | chmod 744 ./bin/test-backend-ci ./bin/test-backend-ci + # Run coverage check script + - run: + name: Check coverage for modified lines + command: | + if [ -n "${CIRCLE_PULL_REQUEST}" ]; then + chmod +x ./tools/check-coverage.js + node -r esm ./tools/check-coverage.js \ + --fail-on-uncovered=<< pipeline.parameters.fail-on-modified-lines >> \ + --output-format=json,html + else + echo "Not a PR build. Skipping coverage check." + fi + when: always - run: name: Compress coverage artifacts command: tar -cvzf backend-coverage-artifacts.tar coverage/ @@ -720,6 +724,10 @@ jobs: path: backend-coverage-artifacts.tar - store_test_results: path: reports/ + # Store uncovered lines artifact if exists + - store_artifacts: + path: coverage-artifacts/ + destination: uncovered-lines resource_class: large test_similarity_api: executor: docker-python-executor @@ -775,13 +783,37 @@ jobs: command: | chmod 744 ./checkcolorhash.sh ./checkcolorhash.sh; + - run: + name: Add GitHub to known_hosts + command: | + mkdir -p /home/circleci/.ssh + ssh-keyscan -H github.com >> /home/circleci/.ssh/known_hosts + - run: name: Test frontend command: yarn --cwd frontend run test:ci --maxWorkers=50% + - run: + name: Check coverage for modified lines + command: | + if [ -n "${CIRCLE_PULL_REQUEST}" ]; then + chmod +x ./tools/check-coverage.js + node -r esm ./tools/check-coverage.js \ + --coverage-file=../frontend/coverage/coverage-final.json \ + --artifact-dir=../frontend/coverage-artifacts \ + --directory-filter=frontend/ \ + --fail-on-uncovered=<< pipeline.parameters.fail-on-modified-lines >> \ + --output-format=json,html + else + echo "Not a PR build. Skipping coverage check." 
+ fi + when: always - store_test_results: path: frontend/reports/ - store_artifacts: path: frontend/coverage/ + - store_artifacts: + path: frontend/coverage-artifacts/ + destination: uncovered-lines resource_class: large test_e2e: executor: docker-postgres-executor @@ -1264,88 +1296,10 @@ jobs: rds_service_name: ttahub-prod s3_service_name: ttahub-db-backups backup_prefix: production - manage_env_apps: - executor: docker-executor - parameters: - env_list: - type: string - description: "Comma-separated list of environments to manage" - default: "<< pipeline.parameters.env_list >>" - env_state: - type: string - description: "Action to perform on apps (start, stop, restart, restage)" - default: "<< pipeline.parameters.env_state >>" - steps: - - run: - name: Install Cloud Foundry CLI - command: | - curl -v -L -o cf-cli_amd64.deb 'https://packages.cloudfoundry.org/stable?release=debian64&version=v7&source=github' - sudo dpkg -i cf-cli_amd64.deb - - run: - name: Manage Apps - command: | - set -x - env_list="<< parameters.env_list >>" - env_state="<< parameters.env_state >>" - - # Split env_list manually - apps=(${env_list//,/ }) - - for env in "${apps[@]}"; do - # Map full environment name to variable prefixes - if [[ "$env" == "tta-smarthub-sandbox" ]]; then - prefix="SANDBOX" - elif [[ "$env" == "tta-smarthub-dev" ]]; then - prefix="DEV" - else - echo "Unrecognized environment: $env" - exit 1 - fi - - # Retrieve the environment-specific variables - space_var="CLOUDGOV_${prefix}_SPACE" - username_var="CLOUDGOV_${prefix}_USERNAME" - password_var="CLOUDGOV_${prefix}_PASSWORD" - space="${!space_var}" - username="${!username_var}" - password="${!password_var}" - - echo "Logging into space: $space for environment: $env" - cf login \ - -a << pipeline.parameters.cg_api >> \ - -u "$username" \ - -p "$password" \ - -o << pipeline.parameters.cg_org >> \ - -s "$space" - - # Get the current state of the app - state=$(cf app "$env" | grep "state:" | awk '{print $2}') - - # Control 
app state based on env_state parameter - if [[ "$env_state" == "stop" && "$state" != "stopped" ]]; then - echo "Stopping $env..." - cf stop "$env" - elif [[ "$env_state" == "start" && "$state" != "started" ]]; then - echo "Starting $env..." - cf start "$env" - elif [[ "$env_state" == "restart" ]]; then - echo "Restarting $env..." - cf restart "$env" - elif [[ "$env_state" == "restage" ]]; then - echo "Restaging $env..." - cf restage "$env" - else - echo "$env is already in the desired state: $state" - fi - done workflows: build_test_deploy: when: - and: - # Ensure the workflow is only triggered when `manual-trigger` is false - # and `env_state` is empty (i.e., it's not for starting/stopping environments) - - equal: [false, << pipeline.parameters.manual-trigger >>] - - equal: [false, << pipeline.parameters.manual-manage-env >>] + equal: [false, << pipeline.parameters.manual-trigger >>] jobs: - build_and_lint - build_and_lint_similarity_api @@ -1453,34 +1407,3 @@ workflows: equal: [true, << pipeline.parameters.manual-trigger >>] jobs: - backup_upload_production - stop_lower_env_workflow: - triggers: - - schedule: - cron: "0 1 * * 2-6" # Runs at 6 PM PST M-F (1 AM UTC next day) - filters: - branches: - only: - - main - jobs: - - manage_env_apps: - env_state: "stop" - env_list: "<< pipeline.parameters.env_list >>" - start_lower_env_workflow: - triggers: - - schedule: - cron: "0 11 * * 1-5" # Runs at 6 AM EST M-F(11 AM UTC) - filters: - branches: - only: - - main - jobs: - - manage_env_apps: - env_state: "start" - env_list: "<< pipeline.parameters.env_list >>" - manual_manage_env_workflow: - when: - equal: [true, << pipeline.parameters.manual-manage-env >>] - jobs: - - manage_env_apps: - env_state: "<< pipeline.parameters.env_state >>" - env_list: "<< pipeline.parameters.env_list >>" diff --git a/README.md b/README.md index 27610c0c00..db55a00de0 100644 --- a/README.md +++ b/README.md @@ -213,6 +213,14 @@ On the frontend, the lcov and HTML files are generated as normal, 
however on the Another important note for running tests on the backend - we specifically exclude files on the backend that follow the ```*CLI.js``` naming convention (for example, ```adminToolsCLI.js```) from test coverage. This is meant to exclude files intended to be run in the shell. Any functionality in theses files should be imported from a file that is tested. The ```src/tools folder``` is where these files have usually lived and there are lots of great examples of the desired pattern in that folder. +### Coverage reports: Uncovered lines on PR builds + +The uncovered-lines report on a PR is generated by finding the intersection between the Jest-generated coverage file and the git change list for the PR. The additional set of artifacts is generated to aid in providing test coverage for each PR. + * coverage/coverage-final.json - Only on test_backend, all the distinct jest run outputs are consolidated into a unified coverage-final.json file. + * uncovered-lines/uncovered-lines.html - A human-readable structure identifying all the lines from this PR that are uncovered by jest tests. + * uncovered-lines/uncovered-lines.json - A json structure identifying all the lines from this PR that are uncovered by jest tests. + + This uncovered-lines check on PR builds can be configured to fail builds by either permanently changing or overriding the pipeline parameter ```fail-on-modified-lines``` to true; it defaults to false.
## Yarn Commands diff --git a/bin/test-backend-ci b/bin/test-backend-ci index 12049143b3..50f7a5506c 100755 --- a/bin/test-backend-ci +++ b/bin/test-backend-ci @@ -81,6 +81,35 @@ main(){ log "Errors occurred during script execution" fi + #run tests on tools + node_modules/.bin/cross-env \ + JEST_JUNIT_OUTPUT_DIR=reports \ + JEST_JUNIT_OUTPUT_NAME="tools".xml \ + POSTGRES_USERNAME=postgres \ + POSTGRES_DB=ttasmarthub \ + CURRENT_USER_ID=5 \ + CI=true \ + node \ + --expose-gc \ + ./node_modules/.bin/jest \ + tools \ + --coverage \ + --colors \ + --reporters=jest-junit \ + --reporters=default \ + --runInBand \ + --silent \ + --colors \ + --logHeapUsage \ + --coverageDirectory="$(pwd)"/coverage/tools \ + --collectCoverageFrom="tools/**/!(*CLI).{js,ts}" \ + --forceExit + + check_exit "$?" + + # Merge coverage reports + node ./tools/merge-coverage.js + exit "$exit_code" } diff --git a/frontend/src/components/ExpanderButton.js b/frontend/src/components/ExpanderButton.js index 4d981eed60..4423731070 100644 --- a/frontend/src/components/ExpanderButton.js +++ b/frontend/src/components/ExpanderButton.js @@ -25,6 +25,7 @@ export default function ExpanderButton({ className={`usa-button--outline usa-button text-no-underline text-middle tta-smarthub--expander-row-${type}s tta-smarthub--expander-row-${type}s-enabled`} onClick={() => closeOrOpen()} aria-label={`${expanded ? 'Hide' : 'View'} ${ariaLabel}`} + data-testid="expander-button" > {expanded ? 
'Hide' : 'View'} {' '} diff --git a/frontend/src/components/GoalCards/ObjectiveCard.js b/frontend/src/components/GoalCards/ObjectiveCard.js index 9e13d4a7d7..65f5128866 100644 --- a/frontend/src/components/GoalCards/ObjectiveCard.js +++ b/frontend/src/components/GoalCards/ObjectiveCard.js @@ -163,7 +163,7 @@ function ObjectiveCard({ { onUpdateObjectiveStatus={onUpdate} forceReadOnly={forceReadOnly} regionId={1} - objectiveId={345345} + objectiveTitle={345345} goalStatus="In Progress" className="test-class" /> diff --git a/frontend/src/components/GoalForm/__tests__/index.js b/frontend/src/components/GoalForm/__tests__/index.js index efc8456590..1442bdf27c 100644 --- a/frontend/src/components/GoalForm/__tests__/index.js +++ b/frontend/src/components/GoalForm/__tests__/index.js @@ -326,6 +326,18 @@ describe('create goal', () => { const save = await screen.findByRole('button', { name: /save/i }); userEvent.click(save); + const sourceValidation = await screen.findByText('Select a goal source'); + expect(sourceValidation).toBeVisible(); + + await act(async () => { + const source = await screen.findByRole('combobox', { name: /goal source/i }); + userEvent.selectOptions(source, 'Federal monitoring issues, including CLASS and RANs'); + }); + + act(() => { + userEvent.click(save); + }); + let alert = await screen.findByRole('alert'); expect(alert.textContent).toBe('There was an error saving your goal'); diff --git a/frontend/src/components/GoalForm/index.js b/frontend/src/components/GoalForm/index.js index 640f5aae4e..0e06ed3138 100644 --- a/frontend/src/components/GoalForm/index.js +++ b/frontend/src/components/GoalForm/index.js @@ -245,7 +245,9 @@ export default function GoalForm({ const newErrors = [...errors]; - if (!source) { + const validSource = Object.values(source).every((s) => Boolean(s)); + + if (!validSource) { error = Select a goal source; } @@ -351,6 +353,7 @@ export default function GoalForm({ // (different validations for not started and draft) const 
isValidNotStarted = () => ( validateGrantNumbers() + && validateGoalSource() && validateEndDate() && validateObjectives() && validateAllPrompts() diff --git a/frontend/src/components/Navigator/ActivityReportNavigator.js b/frontend/src/components/Navigator/ActivityReportNavigator.js index c013d22a5c..377b1cac5c 100644 --- a/frontend/src/components/Navigator/ActivityReportNavigator.js +++ b/frontend/src/components/Navigator/ActivityReportNavigator.js @@ -19,6 +19,9 @@ import AppLoadingContext from '../../AppLoadingContext'; import { convertGoalsToFormData, packageGoals } from '../../pages/ActivityReport/formDataHelpers'; import { objectivesWithValidResourcesOnly, validateListOfResources } from '../GoalForm/constants'; import Navigator from '.'; +import useFormGrantData from '../../hooks/useFormGrantData'; + +const GOALS_AND_OBJECTIVES_POSITION = 2; /** * @@ -109,6 +112,8 @@ const ActivityReportNavigator = ({ }) => { const [showSavedDraft, updateShowSavedDraft] = useState(false); const page = useMemo(() => pages.find((p) => p.path === currentPage), [currentPage, pages]); + // eslint-disable-next-line max-len + const goalsAndObjectivesPage = useMemo(() => pages.find((p) => p.position === GOALS_AND_OBJECTIVES_POSITION), [pages]); const hookForm = useForm({ mode: 'onBlur', // putting it to onBlur as the onChange breaks the new goal form @@ -166,24 +171,41 @@ const ActivityReportNavigator = ({ const recipients = watch('activityRecipients'); const isRecipientReport = activityRecipientType === 'recipient'; - const grantIds = isRecipientReport ? 
recipients.map((r) => { - if (r.grant) { - return r.grant.id; - } - - return r.activityRecipientId; - }) : []; + const { + grantIds, + hasMultipleGrants, + } = useFormGrantData(activityRecipientType, recipients); const { isDirty, isValid } = formState; + const recalculatePageState = () => { + const newPageState = { ...pageState }; + const currentGoalsObjectivesPageState = pageState[GOALS_AND_OBJECTIVES_POSITION]; + // eslint-disable-next-line max-len + const isGoalsObjectivesPageComplete = goalsAndObjectivesPage.isPageComplete(getValues(), formState); + const isCurrentPageGoalsObjectives = page.position === GOALS_AND_OBJECTIVES_POSITION; + + if (isGoalsObjectivesPageComplete) { + newPageState[GOALS_AND_OBJECTIVES_POSITION] = COMPLETE; + } else if (isCurrentPageGoalsObjectives && currentGoalsObjectivesPageState === COMPLETE) { + newPageState[GOALS_AND_OBJECTIVES_POSITION] = IN_PROGRESS; + } else if (isCurrentPageGoalsObjectives) { + // eslint-disable-next-line max-len + newPageState[GOALS_AND_OBJECTIVES_POSITION] = isDirty ? IN_PROGRESS : currentGoalsObjectivesPageState; + } + + return newPageState; + }; + const newNavigatorState = () => { - if (page.review) { - return pageState; + const newPageState = recalculatePageState(); + + if (page.review || page.position === GOALS_AND_OBJECTIVES_POSITION) { + return newPageState; } const currentPageState = pageState[page.position]; const isComplete = page.isPageComplete ? 
page.isPageComplete(getValues(), formState) : isValid; - const newPageState = { ...pageState }; if (isComplete) { newPageState[page.position] = COMPLETE; @@ -195,6 +217,7 @@ const ActivityReportNavigator = ({ return newPageState; }; + const onSaveForm = async (isAutoSave = false, forceUpdate = false) => { setSavingLoadScreen(isAutoSave); if (!editable) { @@ -542,6 +565,7 @@ const ActivityReportNavigator = ({ const areGoalsValid = validateGoals( [goal], setError, + hasMultipleGrants, ); if (areGoalsValid !== true) { diff --git a/frontend/src/components/Navigator/__tests__/ActivityReportNavigator.js b/frontend/src/components/Navigator/__tests__/ActivityReportNavigator.js index a84d2b4e49..ab01a6d467 100644 --- a/frontend/src/components/Navigator/__tests__/ActivityReportNavigator.js +++ b/frontend/src/components/Navigator/__tests__/ActivityReportNavigator.js @@ -90,6 +90,7 @@ const defaultPages = [ path: 'second', label: 'second page', review: false, + isPageComplete: () => false, render: ( _additionalData, _formData, @@ -169,7 +170,7 @@ const initialData = { 'test-prompt': ['test'], }; -describe('Navigator', () => { +describe('ActivityReportNavigator', () => { beforeAll(async () => { jest.useFakeTimers(); }); diff --git a/frontend/src/fetchers/ssdi.js b/frontend/src/fetchers/ssdi.js index bc0c68b95d..77a896024c 100644 --- a/frontend/src/fetchers/ssdi.js +++ b/frontend/src/fetchers/ssdi.js @@ -95,9 +95,11 @@ export const getSelfServiceData = async (filterName, filters, dataSetSelection = const url = getSelfServiceUrl(filterName, filters); const urlToUse = url + dataSetSelection.map((s) => `&dataSetSelection[]=${s}`).join(''); - const response = await get(urlToUse); - if (!response.ok) { - throw new Error('Error fetching self service data'); - } - return response.json(); + + return get(urlToUse).then((response) => { + if (!response.ok) { + throw new Error('Error fetching self service data'); + } + return response.json(); + }); }; diff --git 
a/frontend/src/hooks/useFormGrantData.js b/frontend/src/hooks/useFormGrantData.js new file mode 100644 index 0000000000..424794b78e --- /dev/null +++ b/frontend/src/hooks/useFormGrantData.js @@ -0,0 +1,23 @@ +import { useMemo } from 'react'; + +export function calculateFormGrantData(activityRecipientType, activityRecipients) { + const isRecipient = activityRecipientType === 'recipient'; + const grants = isRecipient ? activityRecipients.map((r) => { + if (r.grant) { + return r.grant.id; + } + return r.activityRecipientId; + }) : []; + + return { + isRecipientReport: isRecipient, + grantIds: grants, + hasGrant: grants.length > 0, + hasMultipleGrants: grants.length > 1, + }; +} + +export default function useFormGrantData(activityRecipientType, activityRecipients) { + // eslint-disable-next-line max-len + return useMemo(() => calculateFormGrantData(activityRecipientType, activityRecipients), [activityRecipientType, activityRecipients]); +} diff --git a/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js b/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js index b67fc55b9e..bd30dc0d3e 100644 --- a/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js +++ b/frontend/src/pages/ActivityReport/Pages/__tests__/goalsObjectives.js @@ -346,7 +346,11 @@ describe('goals objectives', () => { describe('for recipient reports', () => { it('is false if goals are not valid', () => { - const complete = goalsObjectives.isPageComplete({ activityRecipientType: 'recipient', goals: [] }); + const complete = goalsObjectives.isPageComplete({ + activityRecipientType: 'recipient', + activityRecipients: [], + goals: [], + }); expect(complete).toBeFalsy(); }); @@ -355,6 +359,7 @@ describe('goals objectives', () => { name: 'Is goal', endDate: '2021-01-01', isRttapa: 'No', + source: 'Source!!', objectives: [{ id: 1, title: 'title', @@ -366,7 +371,11 @@ describe('goals objectives', () => { supportType: SUPPORT_TYPES[3], }], }]; - const complete = 
goalsObjectives.isPageComplete({ activityRecipientType: 'recipient', goals }); + const complete = goalsObjectives.isPageComplete({ + activityRecipientType: 'recipient', + activityRecipients: [], + goals, + }); expect(complete).toBeTruthy(); }); @@ -385,7 +394,12 @@ describe('goals objectives', () => { roles: ['Chief Inspector'], }], }]; - const complete = goalsObjectives.isPageComplete({ activityRecipientType: 'recipient', goals, goalForEditing: { name: 'is goal 2' } }); + const complete = goalsObjectives.isPageComplete({ + activityRecipientType: 'recipient', + activityRecipients: [], + goals, + goalForEditing: { name: 'is goal 2' }, + }); expect(complete).toBeFalsy(); }); }); @@ -421,7 +435,7 @@ describe('goals objectives', () => { }); it('isPageComplete is false', async () => { - const formData = { activityRecipientType: 'recipient', goals: [] }; + const formData = { activityRecipientType: 'recipient', goals: [], activityRecipients: [] }; const isComplete = goalsObjectives.isPageComplete(formData); expect(isComplete).not.toBeTruthy(); }); diff --git a/frontend/src/pages/ActivityReport/Pages/components/__tests__/goalValidator.js b/frontend/src/pages/ActivityReport/Pages/components/__tests__/goalValidator.js index 2881ab88b5..fde5eb245f 100644 --- a/frontend/src/pages/ActivityReport/Pages/components/__tests__/goalValidator.js +++ b/frontend/src/pages/ActivityReport/Pages/components/__tests__/goalValidator.js @@ -15,6 +15,7 @@ import { } from '../goalValidator'; import { GOAL_NAME_ERROR, + GOAL_SOURCE_ERROR, } from '../../../../../components/GoalForm/constants'; const missingTitle = { @@ -48,6 +49,7 @@ const goalUnfinishedObjective = { name: 'Test goal', endDate: '2021-01-01', isRttapa: 'No', + source: 'source', objectives: [ { ...validObjective }, { ...missingTTAProvided }, @@ -58,10 +60,22 @@ const goalNoObjectives = { name: 'Test goal', endDate: '2021-01-01', isRttapa: 'No', + source: 'source', objectives: [], }; const goalValid = { + name: 'Test goal', + 
endDate: '2021-01-01', + isRttapa: 'No', + source: 'Source', + objectives: [ + { ...validObjective }, + { ...validObjective }, + ], +}; + +const goalNoSource = { name: 'Test goal', endDate: '2021-01-01', isRttapa: 'No', @@ -262,6 +276,16 @@ describe('validateGoals', () => { const result = validateGoals(goals); expect(result).toEqual(UNFINISHED_OBJECTIVES); }); + + it('if no source', () => { + const goals = [ + { ...goalValid }, + { ...goalNoSource }, + ]; + + const result = validateGoals(goals); + expect(result).toEqual(GOAL_SOURCE_ERROR); + }); }); describe('returns true', () => { diff --git a/frontend/src/pages/ActivityReport/Pages/components/goalValidator.js b/frontend/src/pages/ActivityReport/Pages/components/goalValidator.js index 64dd429364..88c7dc095b 100644 --- a/frontend/src/pages/ActivityReport/Pages/components/goalValidator.js +++ b/frontend/src/pages/ActivityReport/Pages/components/goalValidator.js @@ -1,6 +1,7 @@ import { validateListOfResources, GOAL_NAME_ERROR, + GOAL_SOURCE_ERROR, } from '../../../../components/GoalForm/constants'; export const UNFINISHED_OBJECTIVES = 'All objective fields must be completed'; @@ -78,7 +79,7 @@ export const unfinishedObjectives = ( return unfinished ? 
UNFINISHED_OBJECTIVES : false; }; -export const unfinishedGoals = (goals, setError = () => {}) => { +export const unfinishedGoals = (goals, setError = () => {}, hasMultipleGrants) => { for (let i = 0; i < goals.length; i += 1) { const goal = goals[i]; @@ -87,6 +88,11 @@ export const unfinishedGoals = (goals, setError = () => {}) => { return GOAL_NAME_ERROR; } + if (!goal.source && !hasMultipleGrants) { + setError('goalSource', { message: GOAL_SOURCE_ERROR }); + return GOAL_SOURCE_ERROR; + } + // Every goal must have an objective or the `goals` field has unfinished goals if (goal.objectives && goal.objectives.length > 0) { const objectivesUnfinished = unfinishedObjectives(goal.objectives, setError, 'goalForEditing.objectives'); @@ -102,12 +108,12 @@ export const unfinishedGoals = (goals, setError = () => {}) => { return false; }; -export const validateGoals = (goals, setError = () => {}) => { +export const validateGoals = (goals, setError = () => {}, hasMultipleGrants = false) => { if (goals.length < 1) { return GOALS_EMPTY; } - const unfinishedMessage = unfinishedGoals(goals, setError); + const unfinishedMessage = unfinishedGoals(goals, setError, hasMultipleGrants); if (unfinishedMessage) { return unfinishedMessage; } diff --git a/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js b/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js index 57ca8d1494..722fb8d349 100644 --- a/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js +++ b/frontend/src/pages/ActivityReport/Pages/goalsObjectives.js @@ -2,7 +2,7 @@ // disabling prop spreading to use the "register" function from react hook form the same // way they did in their examples import React, { - useState, useContext, useMemo, + useState, useContext, } from 'react'; import PropTypes from 'prop-types'; import { Helmet } from 'react-helmet'; @@ -26,6 +26,8 @@ import ReadOnlyOtherEntityObjectives from '../../../components/GoalForm/ReadOnly import IndicatesRequiredField from 
'../../../components/IndicatesRequiredField'; import { getGoalTemplates } from '../../../fetchers/goalTemplates'; import NavigatorButtons from '../../../components/Navigator/components/NavigatorButtons'; +import { NOOP } from '../../../Constants'; +import useFormGrantData, { calculateFormGrantData } from '../../../hooks/useFormGrantData'; const GOALS_AND_OBJECTIVES_PAGE_STATE_IDENTIFIER = '2'; @@ -105,20 +107,9 @@ const GoalsObjectives = ({ const { isRecipientReport, grantIds, - } = useMemo(() => { - const isRecipient = activityRecipientType === 'recipient'; - const grants = isRecipient ? activityRecipients.map((r) => { - if (r.grant) { - return r.grant.id; - } - return r.activityRecipientId; - }) : []; - - return { - isRecipientReport: isRecipient, - grantIds: grants, - }; - }, [activityRecipientType, activityRecipients]); + hasMultipleGrants, + hasGrant, + } = useFormGrantData(activityRecipientType, activityRecipients); const isOtherEntityReport = activityRecipientType === 'other-entity'; const activityRecipientIds = activityRecipients.map((r) => r.activityRecipientId); @@ -126,7 +117,6 @@ const GoalsObjectives = ({ const [fetchError, setFetchError] = useState(false); const [availableGoals, updateAvailableGoals] = useState([]); const [goalTemplates, setGoalTemplates] = useState([]); - const hasGrants = grantIds.length > 0; const { field: { @@ -145,7 +135,7 @@ const GoalsObjectives = ({ useDeepCompareEffect(() => { const fetchGoalTemplates = async () => { - if (isRecipientReport && hasGrants) { + if (isRecipientReport && hasGrant) { try { const fetchedGoalTemplates = await getGoalTemplates(grantIds); @@ -168,12 +158,12 @@ const GoalsObjectives = ({ }; fetchGoalTemplates(); - }, [grantIds, hasGrants, isRecipientReport]); + }, [grantIds, hasGrant, isRecipientReport]); useDeepCompareEffect(() => { const fetch = async () => { try { - if (isRecipientReport && hasGrants) { + if (isRecipientReport && hasGrant) { const fetchedGoals = await getGoals(grantIds); const 
formattedGoals = fetchedGoals.map((g) => { // if the goal is on an "old" grant, we should @@ -196,9 +186,9 @@ const GoalsObjectives = ({ } }; fetch(); - }, [grantIds, hasGrants, isRecipientReport]); + }, [grantIds, hasGrant, isRecipientReport]); - const showGoals = isRecipientReport && hasGrants; + const showGoals = isRecipientReport && hasGrant; const addNewGoal = () => { toggleGoalForm(false); @@ -254,6 +244,7 @@ const GoalsObjectives = ({ objectives: goalForEditingObjectives, }], setError, + hasMultipleGrants, ); if (areGoalsValid !== true) { @@ -454,7 +445,9 @@ export default { path: 'goals-objectives', review: false, isPageComplete: (formData) => { - const { activityRecipientType } = formData; + const { activityRecipientType, activityRecipients } = formData; + + const { hasMultipleGrants } = calculateFormGrantData(activityRecipientType, activityRecipients); if (!activityRecipientType) { return false; @@ -470,7 +463,7 @@ export default { return false; } - return activityRecipientType === 'recipient' && validateGoals(formData.goals) === true; + return activityRecipientType === 'recipient' && validateGoals(formData.goals, NOOP, hasMultipleGrants) === true; }, reviewSection: () => , render: ( diff --git a/frontend/src/pages/ActivityReport/__tests__/index.js b/frontend/src/pages/ActivityReport/__tests__/index.js index d59fc0d7a1..a362305a38 100644 --- a/frontend/src/pages/ActivityReport/__tests__/index.js +++ b/frontend/src/pages/ActivityReport/__tests__/index.js @@ -1009,6 +1009,7 @@ describe('ActivityReport', () => { status: 'Draft', isRttapa: null, isCurated: false, + source: 'Source', }], }); diff --git a/package.json b/package.json index 32342234c9..a6b437a0b0 100644 --- a/package.json +++ b/package.json @@ -102,7 +102,9 @@ "populateLegacyResourceTitles:local": "./node_modules/.bin/babel-node ./src/tools/populateLegacyResourceTitlesCli.js", "updateCompletedEventReportPilots": "node ./build/server/src/tools/updateCompletedEventReportPilotsCLI.js", 
"updateCompletedEventReportPilots:local": "./node_modules/.bin/babel-node ./src/tools/updateCompletedEventReportPilotsCLI.js", - "publish:common": "yarn --cwd ./packages/common publish" + "publish:common": "yarn --cwd ./packages/common publish", + "merge-coverage": "node ./src/tools/merge-coverage.js", + "check-coverage": "node -r esm ./src/tools/check-coverage.js --fail-on-uncovered=true" }, "repository": { "type": "git", @@ -227,6 +229,12 @@ "/src/tools/populateLegacyResourceTitlesCli.js", "/src/tools/importTTAPlanGoals.js" ], + "coverageReporters": [ + "json", + "lcov", + "text", + "clover" + ], "coverageThreshold": { "global": { "statements": 75, @@ -286,10 +294,15 @@ "eslint-plugin-jest": "^25.3.4", "eslint-plugin-jsx-a11y": "^6.3.1", "eslint-plugin-react": "^7.20.6", + "esm": "^3.2.25", "ioredis-mock": "^8.9.0", + "istanbul-lib-coverage": "^3.2.2", + "istanbul-lib-report": "^3.0.1", "jest": "26.6.0", "jest-cli": "26.4.2", "jest-junit": "^12.0.0", + "markdown-table": "^3.0.4", + "mock-fs": "^5.4.1", "nodemon": "^2.0.4", "playwright": "^1.46.0", "puppeteer": "^13.1.1", @@ -367,7 +380,7 @@ "winston": "^3.3.3", "ws": "^8.17.1", "xml2js": "^0.6.2", - "yargs": "^17.3.1", + "yargs": "^17.7.2", "yayson": "^2.1.0" } } diff --git a/similarity_api/.python-version b/similarity_api/.python-version index 8e34c8131c..9432908664 100644 --- a/similarity_api/.python-version +++ b/similarity_api/.python-version @@ -1 +1 @@ -3.9.19 +3.9.20 diff --git a/similarity_api/Dockerfile b/similarity_api/Dockerfile index 55bb0d304a..96f81a05b4 100644 --- a/similarity_api/Dockerfile +++ b/similarity_api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.19 +FROM python:3.9.20 WORKDIR /app COPY src/requirements.txt . 
RUN pip install --no-cache-dir -r requirements.txt diff --git a/similarity_api/activate-env.sh b/similarity_api/activate-env.sh index f14b15009d..bb8dd86341 100755 --- a/similarity_api/activate-env.sh +++ b/similarity_api/activate-env.sh @@ -1,4 +1,4 @@ #!/usr/bin/env bash # This assumes you've already run `pyenv install` in this directory. -pyenv local 3.9.19 +pyenv local 3.9.20 diff --git a/similarity_api/runtime.txt b/similarity_api/runtime.txt index 4458b433c0..57f558859c 100644 --- a/similarity_api/runtime.txt +++ b/similarity_api/runtime.txt @@ -1 +1 @@ -python-3.9.19 +python-3.9.20 diff --git a/src/constants.js b/src/constants.js index b99de39882..a5b5870552 100644 --- a/src/constants.js +++ b/src/constants.js @@ -38,9 +38,11 @@ const SEARCH_RESULTS_PER_PAGE = 23; const AUTOMATIC_CREATION = 'Automatic'; const CURATED_CREATION = 'Curated'; +const SYSTEM_GENERATED = 'System Generated'; const CREATION_METHOD = { AUTOMATIC: AUTOMATIC_CREATION, CURATED: CURATED_CREATION, + SYSTEM_GENERATED, }; const GOAL_STATUS = { @@ -248,7 +250,7 @@ const MAINTENANCE_CATEGORY = { const GOAL_CREATED_VIA = ['imported', 'activityReport', 'rtr', 'merge', 'admin']; -const CURRENT_GOAL_SIMILARITY_VERSION = 4; +const CURRENT_GOAL_SIMILARITY_VERSION = 5; const FEI_PROD_GOAL_TEMPLATE_ID = 19017; const CLASS_MONITORING_PROD_GOAL_TEMPLATE_ID = 18172; diff --git a/src/goalServices/extractObjectiveAssociationsFromActivityReportObjectives.test.ts b/src/goalServices/extractObjectiveAssociationsFromActivityReportObjectives.test.ts new file mode 100644 index 0000000000..b2dc606238 --- /dev/null +++ b/src/goalServices/extractObjectiveAssociationsFromActivityReportObjectives.test.ts @@ -0,0 +1,20 @@ +import extractObjectiveAssociationsFromActivityReportObjectives from './extractObjectiveAssociationsFromActivityReportObjectives'; +import { IActivityReportObjectivesModelInstance } from './types'; + +describe('extractObjectiveAssociationsFromActivityReportObjectives', () => { + it('should extract 
associations and call toJSON on each association', () => { + const mockToJson = jest.fn().mockReturnValue({ id: 1, name: 'Mocked Association' }); + + const mockActivityReportObjective = { + courses: [{ toJSON: mockToJson }], + } as unknown as IActivityReportObjectivesModelInstance; + + const associations = extractObjectiveAssociationsFromActivityReportObjectives( + [mockActivityReportObjective], + 'courses', + ); + + expect(associations).toEqual([{ id: 1, name: 'Mocked Association' }]); + expect(mockToJson).toHaveBeenCalledTimes(1); + }); +}); diff --git a/src/goalServices/goals.js b/src/goalServices/goals.js index 84ba4564b8..5dca1120bd 100644 --- a/src/goalServices/goals.js +++ b/src/goalServices/goals.js @@ -1331,6 +1331,7 @@ export async function saveGoalsForReport(goals, report) { if (goalTemplate && goalTemplate.creationMethod === CREATION_METHOD.CURATED) { newOrUpdatedGoal = await Goal.create({ goalTemplateId, + createdVia: 'activityReport', name: goal.name ? goal.name.trim() : '', grantId, status, @@ -1674,6 +1675,28 @@ const fieldMappingForDeduplication = { export const hasMultipleGoalsOnSameActivityReport = (countObject) => Object.values(countObject) .some((grants) => Object.values(grants).some((c) => c > 1)); +export function groupSimilarGoalsByGrant(result) { + const completeGroupsByGrant = (result || []).reduce((acc, matchedGoals) => { + const { id, matches } = matchedGoals; + const grantIdGroups = matches.reduce((innerAcc, match) => { + if (!innerAcc[match.grantId]) { + return { ...innerAcc, [match.grantId]: [match.id] }; + } + innerAcc[match.grantId].push(match.id); + return innerAcc; + }, {}); + + acc.push({ id, grantIdGroups }); + return acc; + }, []); + + // Return groups by grant id. 
+ const goalIdGroups = completeGroupsByGrant.map( + (matchedGoalsByGrant) => uniq(Object.values(matchedGoalsByGrant.grantIdGroups)), + ).flat().filter((group) => group.length > 1); + return goalIdGroups; +} + /** * @param {Number} recipientId * @returns { @@ -1706,7 +1729,7 @@ export async function getGoalIdsBySimilarity(recipientId, regionId, user = null) regionId, ); - if (existingRecipientGroups.length) { + if (existingRecipientGroups && existingRecipientGroups.length) { return existingRecipientGroups; } @@ -1718,11 +1741,8 @@ export async function getGoalIdsBySimilarity(recipientId, regionId, user = null) result = similarity.result; } - // convert the response to a group of IDs - const goalIdGroups = (result || []).map((matchedGoals) => { - const { id, matches } = matchedGoals; - return uniq([id, ...matches.map((match) => match.id)]); - }); + // Group goal matches by grantId. + const goalIdGroups = groupSimilarGoalsByGrant(result); const invalidStatusesForReportGoals = [ REPORT_STATUSES.SUBMITTED, diff --git a/src/goalServices/goals.test.js b/src/goalServices/goals.test.js index bf1f6d0deb..32a4a3c936 100644 --- a/src/goalServices/goals.test.js +++ b/src/goalServices/goals.test.js @@ -1,6 +1,4 @@ import { Op } from 'sequelize'; -import { uniq } from 'lodash'; -import { CLOSE_SUSPEND_REASONS } from '@ttahub/common'; import { goalsByIdsAndActivityReport, goalByIdAndActivityReport, @@ -20,6 +18,7 @@ import { getGoalIdsBySimilarity, destroyGoal, mapGrantsWithReplacements, + groupSimilarGoalsByGrant, } from './goals'; import { sequelize, @@ -2188,6 +2187,83 @@ describe('Goals DB service', () => { expect(goalIdGroups).toEqual(expect.any(Array)); expect(goalIdGroups[0].goals).toContain(3); }); + + it('groupSimilarGoalsByGrant works when undefined is passed to the function', async () => { + const resultToTest = undefined; + + const groupedResult = groupSimilarGoalsByGrant(resultToTest); + + expect(groupedResult).toEqual([]); + }); + + it('groupSimilarGoalsByGrant works 
as expected given a result that contains multiple grants for the same goal', async () => { + const resultToTest = [ + { + id: 1, + matches: [ + { + grantId: 1, + id: 1, + name: 'Similar Goal 1', + similarity: 0.9449410438537598, + }, + { + grantId: 2, + id: 2, + name: 'Similar Goal 1, but not quite', + similarity: 0.9449410438537598, + }, + { + grantId: 1, + id: 3, + name: 'Similar Goal 1, but not quite, there is a diff', + similarity: 0.9449410438537598, + }, + { + grantId: 3, + id: 4, + name: 'Similar Goal 1, but not quite, there is a diff, at all', + similarity: 0.9449410438537598, + }, + ], + name: 'Similar Goal 1', + }, + { + id: 1, + matches: [ + { + grantId: 2, + id: 5, + name: 'Similar Goal 2', + similarity: 0.9449410438537598, + }, + { + grantId: 2, + id: 6, + name: 'Similar Goal 2, but not quite', + similarity: 0.9449410438537598, + }, + { + grantId: 1, + id: 7, + name: 'Similar Goal 2, but not quite, there is a diff', + similarity: 0.9449410438537598, + }, + ], + name: 'Similar Goal 2', + }, + ]; + + const groupedResult = groupSimilarGoalsByGrant(resultToTest); + + // Assert that the result is grouped by grantId. + // Ensure groups that had only one goal are excluded. 
+ expect(groupedResult.length).toBe(2); + expect(groupedResult).toEqual(expect.arrayContaining([ + expect.arrayContaining([1, 3]), + expect.arrayContaining([5, 6]), + ])); + }); }); describe('destroyGoal', () => { @@ -2195,6 +2271,14 @@ describe('Goals DB service', () => { jest.clearAllMocks(); }); + it('should return 0 when goalIds is not a valid array of numbers', async () => { + const singleResult = await destroyGoal('notAnArray'); + const arrayResult = await destroyGoal([]); + + expect(singleResult).toEqual(0); + expect(arrayResult).toEqual({ goalsDestroyed: undefined, objectivesDestroyed: undefined }); + }); + it('should delete objectives and goals if goalIds are provided', async () => { const goalIds = [1, 2]; const objectiveIds = [10, 11]; diff --git a/src/goalServices/goalsFromTemplate.ts b/src/goalServices/goalsFromTemplate.ts index 3abf8e5d0f..ec8f77b09f 100644 --- a/src/goalServices/goalsFromTemplate.ts +++ b/src/goalServices/goalsFromTemplate.ts @@ -61,7 +61,7 @@ export default async function goalsFromTemplate( const unsuspends = suspendedGoals.map((goal) => changeGoalStatus({ goalId: goal.id, userId, - newStatus: 'In Progress', + newStatus: GOAL_STATUS.IN_PROGRESS, reason: '', context: '', })); @@ -75,6 +75,7 @@ export default async function goalsFromTemplate( status: GOAL_STATUS.IN_PROGRESS, source: template.source, name: template.templateName, + createdVia: 'rtr', }, { individualHooks: true, returning: ['id'], diff --git a/src/goalServices/helpers.js b/src/goalServices/helpers.js index 0722e8c5b9..da54ae267c 100644 --- a/src/goalServices/helpers.js +++ b/src/goalServices/helpers.js @@ -13,23 +13,23 @@ const findOrFailExistingGoal = (needle, haystack, translate = goalFieldTransate) (c) => c.goalCreatorName, ).filter(Boolean); - const haystackCollaborators = haystack.flatMap( - (g) => (g.collaborators || []).map((c) => c.goalCreatorName).filter(Boolean), - ); + return haystack.find((g) => { + const haystackCollaborators = (g.collaborators || []) + 
.map((c) => c.goalCreatorName).filter(Boolean); - return haystack.find((g) => ( - g[translate.status] === needle.status - && g[translate.name].trim() === needle.name.trim() - && g[translate.source] === needle.source - && g.isFei === needle.dataValues.isFei - && g[translate.responsesForComparison] === responsesForComparison(needle) - && ( - // Check if both needle and haystack goal have no valid collaborators - (needleCollaborators.length === 0 && (g.collaborators || []) - .every((c) => c.goalCreatorName === undefined)) - || haystackCollaborators.some((c) => needleCollaborators.includes(c)) - ) - )); + return ( + g[translate.status] === needle.status + && g[translate.name].trim() === needle.name.trim() + && g[translate.source] === needle.source + && g.isFei === needle.dataValues.isFei + && g[translate.responsesForComparison] === responsesForComparison(needle) + && ( + (needleCollaborators.length === 0 && haystackCollaborators.length === 0) + || haystackCollaborators + .some((collaborator) => needleCollaborators.includes(collaborator)) + ) + ); + }); }; export { diff --git a/src/goalServices/helpers.test.js b/src/goalServices/helpers.test.js new file mode 100644 index 0000000000..edcc722774 --- /dev/null +++ b/src/goalServices/helpers.test.js @@ -0,0 +1,45 @@ +import { findOrFailExistingGoal } from './helpers'; + +describe('findOrFailExistingGoal', () => { + const needle = { + status: 'Draft', + name: 'Test Goal', + source: 'Test Source', + dataValues: { isFei: true }, + responses: [{ response: 'Response 1' }, { response: 'Response 2' }], + collaborators: [], + }; + + const haystack = [ + { + goalStatus: 'Draft', + goalText: 'Test Goal', + source: 'Test Source', + isFei: true, + collaborators: [{ goalCreatorName: undefined }], + responsesForComparison: 'Response 1,Response 2', + }, + { + goalStatus: 'Draft', + goalText: 'Test Goal', + source: 'Test Source', + isFei: true, + collaborators: [{ goalCreatorName: 'John Doe' }], + responsesForComparison: 'Response 
1,Response 2', + }, + ]; + + it('should return a goal with undefined collaborator names', () => { + const result = findOrFailExistingGoal(needle, haystack); + expect(result).toEqual(haystack[0]); + }); + + it('should return a goal with a collaborator that matches the needle collaborator', () => { + const customNeedle = { + ...needle, + collaborators: [{ goalCreatorName: 'John Doe' }], + }; + const result = findOrFailExistingGoal(customNeedle, haystack); + expect(result).toEqual(haystack[1]); + }); +}); diff --git a/src/goalServices/nudge.test.js b/src/goalServices/nudge.test.js index 052755d836..ed034518ac 100644 --- a/src/goalServices/nudge.test.js +++ b/src/goalServices/nudge.test.js @@ -71,6 +71,57 @@ describe('nudge', () => { await db.sequelize.close(); }); + it('should not unshift a template goal if it is already in the goals', async () => { + const recipientId = 1; + const text = 'Some goal text'; + const grantNumbers = ['GRANT-1234567']; + + const matchingId = 1; + const templateName = 'Template Goal'; + const templateSource = 'Some source'; + + const newGrant = await createGrant({ number: grantNumbers[0], recipientId }); + + similarGoalsForRecipient.mockReturnValueOnce({ + result: [ + { + goal: { + id: matchingId, + name: templateName, + isTemplate: true, + source: templateSource, + endDate: '', + }, + similarity: 0.7, + }, + ], + }); + + // Create a goal with a matching template ID and valid grantId + await Goal.create({ + id: 999111, + grantId: newGrant.id, + name: 'Existing Goal', + status: GOAL_STATUS.NOT_STARTED, + goalTemplateId: matchingId, + }); + + const results = await nudge(recipientId, text, grantNumbers); + + expect(results).not.toContainEqual([{ + ids: [matchingId], + name: templateName, + status: GOAL_STATUS.NOT_STARTED, + goalTemplateId: matchingId, + isCuratedTemplate: true, + endDate: '', + source: templateSource, + }]); + + await Goal.destroy({ where: { id: 999111 }, force: true }); + await Grant.destroy({ where: { id: newGrant.id }, 
force: true, individualHooks: true }); + }); + it('should return a nudge', async () => { const goalName = goal.name; const goalId = goal.id; diff --git a/src/goalServices/reduceGoals.test.ts b/src/goalServices/reduceGoals.test.ts new file mode 100644 index 0000000000..3d7a174c33 --- /dev/null +++ b/src/goalServices/reduceGoals.test.ts @@ -0,0 +1,71 @@ +import { reduceGoals } from './reduceGoals'; + +describe('reduceGoals', () => { + const goals = [ + { + id: 1, + name: null, // branch coverage case + status: 'Draft', + isCurated: false, + objectives: [], + grant: { + recipientId: 1, + numberWithProgramTypes: 1, + recipient: { + dataValues: {}, + }, + }, + dataValues: { + endDate: '2023-12-31', + grant: { + recipientId: 1, + numberWithProgramTypes: 1, + }, + }, + endDate: '2023-12-31', + grantId: 1, + createdVia: 'rtr', + source: 'Source', + }, + { + id: 2, + name: '', + status: 'Draft', + isCurated: false, + objectives: [], + grant: { + recipientId: 1, + numberWithProgramTypes: 1, + recipient: { + dataValues: {}, + }, + }, + dataValues: { + endDate: '2023-12-31', + grant: { + recipientId: 1, + numberWithProgramTypes: 1, + }, + }, + endDate: '2023-12-31', + grantId: 1, + createdVia: 'rtr', + source: 'Source', + }, + ]; + + beforeEach(() => { + jest.clearAllMocks(); + }); + + it('should return undefined if no goals are provided', () => { + const result = reduceGoals([]); + expect(result).toEqual([]); + }); + + it('should return ...something', () => { + // @ts-ignore + const result = reduceGoals(goals); + expect(result.length).toEqual(1); + }); +}); diff --git a/src/goalServices/wasGoalPreviouslyClosed.test.ts b/src/goalServices/wasGoalPreviouslyClosed.test.ts new file mode 100644 index 0000000000..eb9e9ce6a0 --- /dev/null +++ b/src/goalServices/wasGoalPreviouslyClosed.test.ts @@ -0,0 +1,39 @@ +import wasGoalPreviouslyClosed from './wasGoalPreviouslyClosed'; +import { GOAL_STATUS } from '../constants'; + +describe('wasGoalPreviouslyClosed', () => { + it('returns true 
if the goal was previously closed', () => { + const goal = { + statusChanges: [ + { oldStatus: GOAL_STATUS.IN_PROGRESS }, + { oldStatus: GOAL_STATUS.CLOSED }, + ], + }; + expect(wasGoalPreviouslyClosed(goal)).toBe(true); + }); + + it('returns false if the goal was never closed', () => { + const goal = { + statusChanges: [ + { oldStatus: GOAL_STATUS.IN_PROGRESS }, + { oldStatus: GOAL_STATUS.NOT_STARTED }, + ], + }; + expect(wasGoalPreviouslyClosed(goal)).toBe(false); + }); + + it('returns false if there are no status changes', () => { + const goal = {}; + expect(wasGoalPreviouslyClosed(goal)).toBe(false); + }); + + it('returns false if status changes do not contain closed status', () => { + const goal = { + statusChanges: [ + { oldStatus: 'Archived' }, + { oldStatus: 'Completed' }, + ], + }; + expect(wasGoalPreviouslyClosed(goal)).toBe(false); + }); +}); diff --git a/src/lib/mailer/index.js b/src/lib/mailer/index.js index 2da8af6855..563b3f901a 100644 --- a/src/lib/mailer/index.js +++ b/src/lib/mailer/index.js @@ -19,7 +19,7 @@ import { activityReportsApprovedByDate, } from '../../services/activityReports'; import { userById } from '../../services/users'; -import logEmailNotification from './logNotifications'; +import logEmailNotification, { logDigestEmailNotification } from './logNotifications'; import transactionQueueWrapper from '../../workers/transactionWrapper'; import referenceData from '../../workers/referenceData'; import safeParse from '../../models/helpers/safeParse'; @@ -96,16 +96,32 @@ export const filterAndDeduplicateEmails = (emails) => { }; export const onFailedNotification = (job, error) => { - auditLogger.error(`job ${job.name} failed for report ${job.data.report.displayId} with error ${error}`); - logEmailNotification(job, false, error); + if (job.data.reports && Array.isArray(job.data.reports)) { + job.data.reports.forEach((report) => { + auditLogger.error(`job ${job.name} failed for report ${report.displayId} with error ${error}`); + }); + 
logDigestEmailNotification(job, false, error); + } else { + auditLogger.error(`job ${job.name} failed for report ${(job.data.report?.displayId) || 'unknown'} with error ${error}`); + logEmailNotification(job, false, error); + } }; export const onCompletedNotification = (job, result) => { - if (result != null) { - logger.info(`Successfully sent ${job.name} notification for ${job.data.report.displayId || job.data.report.id}`); + if (job.data.reports && Array.isArray(job.data.reports)) { + job.data.reports.forEach((report) => { + if (result != null) { + logger.info(`Successfully sent ${job.name} notification for ${report.displayId}`); + logDigestEmailNotification(job, true, result); + } else { + logger.info(`Did not send ${job.name} notification for ${report.displayId} preferences are not set or marked as "no-send"`); + } + }); + } else if (result != null) { + logger.info(`Successfully sent ${job.name} notification for ${job.data.report.displayId || job.data}`); logEmailNotification(job, true, result); } else { - logger.info(`Did not send ${job.name} notification for ${job.data.report.displayId || job.data.report.id} preferences are not set or marked as "no-send"`); + logger.info(`Did not send ${job.name} notification for ${job.data.report.displayId || job.data} preferences are not set or marked as "no-send"`); } }; @@ -872,13 +888,13 @@ export async function recipientApprovedDigest(freq, subjectFreq) { const data = { user, reports, - type: EMAIL_ACTIONS.RECIPIENT_APPROVED_DIGEST, + type: EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, freq, subjectFreq, ...referenceData(), }; - notificationQueue.add(EMAIL_ACTIONS.RECIPIENT_APPROVED_DIGEST, data); + notificationQueue.add(EMAIL_ACTIONS.RECIPIENT_REPORT_APPROVED_DIGEST, data); return data; }); diff --git a/src/lib/mailer/index.test.js b/src/lib/mailer/index.test.js index e5fb1b9f34..41d0838fa7 100644 --- a/src/lib/mailer/index.test.js +++ b/src/lib/mailer/index.test.js @@ -21,13 +21,14 @@ import { trSessionCreated, 
trCollaboratorAdded, filterAndDeduplicateEmails, + onCompletedNotification, } from '.'; import { EMAIL_ACTIONS, EMAIL_DIGEST_FREQ, DIGEST_SUBJECT_FREQ, } from '../../constants'; -import { auditLogger as logger } from '../../logger'; +import { auditLogger, logger } from '../../logger'; import { userById } from '../../services/users'; import db, { ActivityReport, ActivityReportCollaborator, User, ActivityReportApprover, @@ -166,6 +167,96 @@ describe('mailer tests', () => { jest.clearAllMocks(); }); + describe('onCompletedNotification', () => { + afterEach(() => { + logger.info.mockClear(); + }); + + it('logs if result is null, single report', () => { + onCompletedNotification({ + data: { + report: mockReport, + }, + name: EMAIL_ACTIONS.APPROVED, + }, null); + + expect(logger.info).toHaveBeenCalledWith(`Did not send ${EMAIL_ACTIONS.APPROVED} notification for ${mockReport.displayId} preferences are not set or marked as "no-send"`); + }); + + it('logs if result is null, single report, no display id', () => { + onCompletedNotification({ + data: { + report: { + ...mockReport, + displayId: null, + }, + }, + name: EMAIL_ACTIONS.APPROVED, + }, null); + + expect(logger.info).toHaveBeenCalledWith(`Did not send ${EMAIL_ACTIONS.APPROVED} notification for [object Object] preferences are not set or marked as "no-send"`); + }); + it('logs if result is good, single report', () => { + onCompletedNotification({ + data: { + report: mockReport, + }, + name: EMAIL_ACTIONS.APPROVED, + }, { notNull: true }); + + expect(logger.info).toHaveBeenCalledWith('Successfully sent reportApproved notification for mockReport-1'); + }); + + it('logs if result is good, single report, no display id', () => { + onCompletedNotification({ + data: { + report: { + ...mockReport, + displayId: null, + }, + }, + name: EMAIL_ACTIONS.APPROVED, + }, { notNull: true }); + + expect(logger.info).toHaveBeenCalledWith('Successfully sent reportApproved notification for [object Object]'); + }); + + it('logs if result is 
good, reports exists but is not array, no display id', () => { + onCompletedNotification({ + data: { + report: { + ...mockReport, + displayId: null, + }, + reports: 'Reports!', + }, + name: EMAIL_ACTIONS.APPROVED, + }, { notNull: true }); + + expect(logger.info).toHaveBeenCalledWith('Successfully sent reportApproved notification for [object Object]'); + }); + it('logs if result is good, many reports', () => { + onCompletedNotification({ + data: { + reports: [mockReport], + }, + name: EMAIL_ACTIONS.APPROVED, + }, { notNull: true }); + + expect(logger.info).toHaveBeenCalledWith('Successfully sent reportApproved notification for mockReport-1'); + }); + it('log if result is null, many reports', () => { + onCompletedNotification({ + data: { + reports: [mockReport], + }, + name: EMAIL_ACTIONS.APPROVED, + }, null); + + expect(logger.info).toHaveBeenCalledWith('Did not send reportApproved notification for mockReport-1 preferences are not set or marked as "no-send"'); + }); + }); + describe('Changes requested by manager', () => { it('Tests that an email is sent', async () => { process.env.SEND_NOTIFICATIONS = 'true'; @@ -990,7 +1081,7 @@ describe('mailer tests', () => { report, [mockCollaborator1, mockCollaborator2], ); - expect(logger.error).toHaveBeenCalledWith(new Error('Christmas present!')); + expect(auditLogger.error).toHaveBeenCalledWith(new Error('Christmas present!')); }); it('"approver assigned" on the notificationQueue', async () => { @@ -1013,7 +1104,7 @@ describe('mailer tests', () => { const report = await ActivityReport.create(reportObject); approverAssignedNotification(report, [mockApprover]); - expect(logger.error).toHaveBeenCalledWith(new Error('Something is not right')); + expect(auditLogger.error).toHaveBeenCalledWith(new Error('Something is not right')); }); it('"report approved" on the notificationQueue', async () => { @@ -1033,7 +1124,7 @@ describe('mailer tests', () => { const report = await ActivityReport.create(reportObject); 
reportApprovedNotification(report); - expect(logger.error).toHaveBeenCalledWith(new Error('Something is not right')); + expect(auditLogger.error).toHaveBeenCalledWith(new Error('Something is not right')); }); it('"changes requested" on the notificationQueue', async () => { @@ -1053,7 +1144,7 @@ describe('mailer tests', () => { const report = await ActivityReport.create(reportObject); changesRequestedNotification(report); - expect(logger.error).toHaveBeenCalledWith(new Error('Christmas present!')); + expect(auditLogger.error).toHaveBeenCalledWith(new Error('Christmas present!')); }); it('"collaborator added" digest on the notificationDigestQueue', async () => { @@ -1164,7 +1255,7 @@ describe('mailer tests', () => { }); beforeEach(() => { notificationQueueMock.add.mockClear(); - logger.error.mockClear(); + auditLogger.error.mockClear(); process.env.CI = ''; }); afterEach(() => { @@ -1193,14 +1284,14 @@ describe('mailer tests', () => { userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); await trSessionCreated(); expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); - expect(logger.error).toHaveBeenCalledTimes(1); + expect(auditLogger.error).toHaveBeenCalledTimes(1); }); it('trSessionCreated early return on CI', async () => { process.env.CI = 'true'; userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); await trSessionCreated(mockEvent); expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); - expect(logger.error).toHaveBeenCalledTimes(0); + expect(auditLogger.error).toHaveBeenCalledTimes(0); }); it('trCollaboratorAdded success', async () => { userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); @@ -1218,14 +1309,14 @@ describe('mailer tests', () => { userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); await trCollaboratorAdded(); expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); - expect(logger.error).toHaveBeenCalledTimes(1); + 
expect(auditLogger.error).toHaveBeenCalledTimes(1); }); it('trCollaboratorAdded early return', async () => { process.env.CI = 'true'; userById.mockImplementation(() => Promise.resolve({ email: 'user@user.com' })); await trCollaboratorAdded(); expect(notificationQueueMock.add).toHaveBeenCalledTimes(0); - expect(logger.error).toHaveBeenCalledTimes(0); + expect(auditLogger.error).toHaveBeenCalledTimes(0); }); }); diff --git a/src/migrations/20241031203552-GrantRelationshipToActive.js b/src/migrations/20241031203552-GrantRelationshipToActive.js index 0a757d67ac..222cd1bfa4 100644 --- a/src/migrations/20241031203552-GrantRelationshipToActive.js +++ b/src/migrations/20241031203552-GrantRelationshipToActive.js @@ -54,10 +54,6 @@ module.exports = { ORDER BY 2,3 WITH DATA; - ALTER TABLE IF EXISTS public."GrantRelationshipToActive" - OWNER TO postgres; - - CREATE INDEX "idx_GrantRelationshipToActive_grantId_activeGrantId" ON public."GrantRelationshipToActive" USING btree ("grantId", "activeGrantId"); diff --git a/src/migrations/20241112161634-add-regional-program-director-role.js b/src/migrations/20241112161634-add-regional-program-director-role.js new file mode 100644 index 0000000000..6d46c4d3ea --- /dev/null +++ b/src/migrations/20241112161634-add-regional-program-director-role.js @@ -0,0 +1,21 @@ +const { prepMigration } = require('../lib/migration'); + +module.exports = { + up: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + await queryInterface.sequelize.query(` + INSERT INTO "Roles" ("name", "fullName", "isSpecialist", "createdAt", "updatedAt") VALUES ('RPD', 'Regional Program Director', false, now(), now()); + `, { transaction }); + }, + ), + + down: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + await queryInterface.sequelize.query(` + DELETE FROM 
"Roles" WHERE "name" = 'RPD'; + `, { transaction }); + }, + ), +}; diff --git a/src/migrations/20241113165852-add-monitoring-goal-template.js b/src/migrations/20241113165852-add-monitoring-goal-template.js new file mode 100644 index 0000000000..dbb2dfc5ec --- /dev/null +++ b/src/migrations/20241113165852-add-monitoring-goal-template.js @@ -0,0 +1,46 @@ +const { prepMigration } = require('../lib/migration'); + +const goalText = '(Monitoring) The recipient will develop and implement a QIP/CAP to address monitoring findings.'; +module.exports = { + up: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + // Add monitor goal template. + await queryInterface.sequelize.query( + `INSERT INTO "GoalTemplates" ( + hash, + "templateName", + "regionId", + "creationMethod", + "createdAt", + "updatedAt", + "lastUsed", + "templateNameModifiedAt" + ) Values ( + MD5(TRIM('${goalText}')), + '${goalText}', + null, + 'Curated'::"enum_GoalTemplates_creationMethod", + current_timestamp, + current_timestamp, + NULL, + current_timestamp + );`, + { transaction }, + ); + }, + ), + + down: async (queryInterface) => queryInterface.sequelize.transaction( + async (transaction) => { + await prepMigration(queryInterface, transaction, __filename); + await queryInterface.sequelize.query( + `DELETE FROM "GoalTemplates" + WHERE hash = MD5(TRIM('${goalText}')) + AND "creationMethod" = 'Curated'::"enum_GoalTemplates_creationMethod"; + `, + { transaction }, + ); + }, + ), +}; diff --git a/src/migrations/20241113171838-add-system-generated-created-via-enum.js b/src/migrations/20241113171838-add-system-generated-created-via-enum.js new file mode 100644 index 0000000000..00633d50cd --- /dev/null +++ b/src/migrations/20241113171838-add-system-generated-created-via-enum.js @@ -0,0 +1,34 @@ +const { prepMigration, dropAndRecreateEnum } = require('../lib/migration'); +const { CREATION_METHOD } = 
require('../constants'); + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface) { + await queryInterface.sequelize.transaction(async (transaction) => { + await dropAndRecreateEnum( + queryInterface, + transaction, + 'enum_GoalTemplates_creationMethod', + 'GoalTemplates', + 'creationMethod', + [CREATION_METHOD.AUTOMATIC, CREATION_METHOD.CURATED, CREATION_METHOD.SYSTEM_GENERATED], + 'text', + false, + ); + + // Update the creationMethod for the goal template with the monitoring goal name. + const monitoringTemplateName = '(Monitoring) The recipient will develop and implement a QIP/CAP to address monitoring findings.'; + + // Update the creationMethod to SYSTEM_CREATED for the monitoring goal template. + await queryInterface.sequelize.query(` + UPDATE "GoalTemplates" + SET "creationMethod" = '${CREATION_METHOD.SYSTEM_GENERATED}'::"enum_GoalTemplates_creationMethod" + WHERE "templateName" = '${monitoringTemplateName}'; + `, { transaction }); + }); + }, + + async down() { + // no rollbacks + }, +}; diff --git a/src/migrations/20241114000000-format-comlog-comdate-history.js b/src/migrations/20241114000000-format-comlog-comdate-history.js new file mode 100644 index 0000000000..230eabbe5c --- /dev/null +++ b/src/migrations/20241114000000-format-comlog-comdate-history.js @@ -0,0 +1,83 @@ +const { prepMigration } = require('../lib/migration'); + +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface) { + await queryInterface.sequelize.transaction(async (transaction) => { + const sessionSig = __filename; + await prepMigration(queryInterface, transaction, sessionSig); + return queryInterface.sequelize.query(` + -- This reformats all historical communicationDate values to mm/dd/yyyy + -- + -- Assumptions of preexisting data: + -- -always month-day-year + -- -always separated by a slash, period, or space [/. 
] + -- -if there is an extra separator it impacts the year + -- -if the third position is at least two characters, it's the year, else it's the fourth position + -- -the first four characters of a long year string holds the year + -- + -- These assumptions are based on the data we have, so aren't guaranteed to be correct if this is rerun. + -- However the logic is slightly overengineered for extra robustness if new errors show up. At the time of + -- writing, this produces all valid strings where to_date(data->>'communicationDate','mm/dd/yyyy') succeeds + + DROP TABLE IF EXISTS comdate_corrections; + CREATE TEMP TABLE comdate_corrections + AS + WITH reseparated AS ( + SELECT + id clid, + data->>'communicationDate' orig, + -- replace [-. ] seperators (only - has been seen) with / so the subsequent logic always works + regexp_replace(data->>'communicationDate','[-. ]','/','g') reseparated + FROM "CommunicationLogs" + WHERE data->>'communicationDate' !~ '^\\d{2}/\\d{2}/\\d{4}$' + AND COALESCE(data->>'communicationDate','') != '' + ), + date_particles AS ( + SELECT + clid, + orig, + SPLIT_PART(reseparated,'/',1) month_part, + SPLIT_PART(reseparated,'/',2) day_part, + -- check where the year part is because sometimes separators between day and year are doubled + CASE + WHEN LENGTH(SPLIT_PART(reseparated,'/',3)) > 1 THEN SPLIT_PART(reseparated,'/',3) + ELSE SPLIT_PART(reseparated,'/',4) + END AS year_part + FROM reseparated + ), + padded_particles AS ( + SELECT + clid, + orig, + LPAD(month_part,2,'0') padded_month, + LPAD(day_part,2,'0') padded_day, + -- pull out only the leftmost 4 characters, but pad them with the century if we only two chars + LPAD( + LEFT(year_part,4), + 4, + '20' + ) padded_year + FROM date_particles + ) + SELECT + clid, + orig, + padded_month || '/' || padded_day || '/' || padded_year reformat + FROM padded_particles + ; + + UPDATE "CommunicationLogs" + SET data = jsonb_set(data, '{communicationDate}', to_jsonb(reformat)) + FROM comdate_corrections 
+ WHERE id = clid + ; + + `); + }); + }, + + async down() { + // no rollbacks + }, +}; diff --git a/src/queries/api/dashboards/qa/class.sql b/src/queries/api/dashboards/qa/class.sql index 39c066f342..62e666e93d 100644 --- a/src/queries/api/dashboards/qa/class.sql +++ b/src/queries/api/dashboards/qa/class.sql @@ -332,7 +332,7 @@ DECLARE BEGIN --------------------------------------------------------------------------------------------------- -- Step 0.1: make a table to hold applied filters - -- DROP TABLE IF EXISTS process_log; + DROP TABLE IF EXISTS process_log; CREATE TEMP TABLE IF NOT EXISTS process_log( action TEXT, record_cnt int, @@ -340,21 +340,23 @@ BEGIN ); --------------------------------------------------------------------------------------------------- -- Step 1.1: Seed filtered_grants - -- DROP TABLE IF EXISTS filtered_grants; + DROP TABLE IF EXISTS filtered_grants; CREATE TEMP TABLE IF NOT EXISTS filtered_grants (id INT); WITH seed_filtered_grants AS ( - INSERT INTO filtered_grants (id) - SELECT DISTINCT id - FROM "Grants" - WHERE COALESCE(deleted, false) = false - ORDER BY 1 - RETURNING id + INSERT INTO filtered_grants (id) + SELECT + id + FROM "Grants" + WHERE COALESCE(deleted, false) = false + GROUP BY 1 + ORDER BY 1 + RETURNING id ) INSERT INTO process_log (action, record_cnt) SELECT - 'Seed filtered_grants' AS action, - COUNT(*) + 'Seed filtered_grants' AS action, + COUNT(*) FROM seed_filtered_grants GROUP BY 1; --------------------------------------------------------------------------------------------------- @@ -422,22 +424,23 @@ BEGIN COALESCE(region_ids_filter, '[]')::jsonb @> to_jsonb(gr."regionId")::jsonb ) ) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( - 
DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afogr - WHERE fgr.id = afogr.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afogr + WHERE fgr.id = afogr.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -483,22 +486,23 @@ BEGIN WHERE 1 = 1 -- Continued Filter for group if ssdi.group is defined from left joined table above AND (group_filter IS NULL OR (g.id IS NOT NULL AND (gc.id IS NOT NULL OR g."sharedWith" = 'Everyone'))) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afogr - WHERE fgr.id = afogr.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afogr + WHERE fgr.id = afogr.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -617,21 +621,22 @@ BEGIN != domain_instructional_support_not_filter ) ) + ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afogr - WHERE fgr.id = afogr.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afogr + WHERE fgr.id = afogr.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -642,18 +647,20 @@ BEGIN END IF; --------------------------------------------------------------------------------------------------- -- Step 2.1: Seed 
filtered_goals using filtered_grants - -- DROP TABLE IF EXISTS filtered_goals; + DROP TABLE IF EXISTS filtered_goals; CREATE TEMP TABLE IF NOT EXISTS filtered_goals (id INT); WITH seed_filtered_goals AS ( INSERT INTO filtered_goals (id) - SELECT DISTINCT g.id + SELECT + g.id FROM "Goals" g JOIN filtered_grants fgr ON g."grantId" = fgr.id WHERE g."deletedAt" IS NULL AND g."mapsToParentGoalId" IS NULL - ORDER BY g.id -- Add ORDER BY here + GROUP BY 1 + ORDER BY 1 RETURNING id ) INSERT INTO process_log (action, record_cnt) @@ -671,7 +678,7 @@ BEGIN THEN WITH applied_filtered_goals AS ( - SELECT DISTINCT + SELECT g.id FROM filtered_goals fg JOIN "Goals" g @@ -705,38 +712,39 @@ BEGIN ) != goal_status_not_filter ) ) + GROUP BY 1 + ORDER BY 1 ), applied_filtered_out_goals AS ( - SELECT - fg.id - FROM filtered_goals fg - LEFT JOIN applied_filtered_goals afg - ON fg.id = afg.id - WHERE afg.id IS NULL - ORDER BY 1 + SELECT + fg.id + FROM filtered_goals fg + LEFT JOIN applied_filtered_goals afg ON fg.id = afg.id + GROUP BY 1 + HAVING COUNT(afg.id) = 0 + ORDER BY 1 ), delete_from_goal_filter AS ( - DELETE FROM filtered_goals fg - USING applied_filtered_out_goals afog - WHERE fg.id = afog.id - RETURNING fg.id + DELETE FROM filtered_goals fg + USING applied_filtered_out_goals afog + WHERE fg.id = afog.id + RETURNING fg.id ), applied_filtered_out_grants AS ( - SELECT - fgr.id - FROM filtered_grants fgr - LEFT JOIN "Goals" g - ON fgr.id = g."grantId" - LEFT JOIN filtered_goals fg - ON g.id = fg.id - WHERE fg.id IS NULL - ORDER BY 1 + SELECT + fgr.id + FROM filtered_grants fgr + LEFT JOIN "Goals" g ON fgr.id = g."grantId" + LEFT JOIN filtered_goals fg ON g.id = fg.id + GROUP BY 1 + HAVING COUNT(fg.id) = 0 + ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afog - WHERE fgr.id = afog.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afog + WHERE fgr.id = afog.id + RETURNING 
fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT diff --git a/src/queries/api/dashboards/qa/dashboard.sql b/src/queries/api/dashboards/qa/dashboard.sql index a08ef7d299..c4b1777508 100644 --- a/src/queries/api/dashboards/qa/dashboard.sql +++ b/src/queries/api/dashboards/qa/dashboard.sql @@ -370,7 +370,7 @@ DECLARE BEGIN --------------------------------------------------------------------------------------------------- -- Step 0.1: make a table to hold applied filters - -- DROP TABLE IF EXISTS process_log; + DROP TABLE IF EXISTS process_log; CREATE TEMP TABLE IF NOT EXISTS process_log( action TEXT, record_cnt int, @@ -378,21 +378,23 @@ BEGIN ); --------------------------------------------------------------------------------------------------- -- Step 1.1: Seed filtered_grants - -- DROP TABLE IF EXISTS filtered_grants; + DROP TABLE IF EXISTS filtered_grants; CREATE TEMP TABLE IF NOT EXISTS filtered_grants (id INT); WITH seed_filtered_grants AS ( - INSERT INTO filtered_grants (id) - SELECT DISTINCT id - FROM "Grants" - WHERE COALESCE(deleted, false) = false - ORDER BY 1 - RETURNING id + INSERT INTO filtered_grants (id) + SELECT + id + FROM "Grants" + WHERE COALESCE(deleted, false) = false + GROUP BY 1 + ORDER BY 1 + RETURNING id ) INSERT INTO process_log (action, record_cnt) SELECT - 'Seed filtered_grants' AS action, - COUNT(*) + 'Seed filtered_grants' AS action, + COUNT(*) FROM seed_filtered_grants GROUP BY 1; --------------------------------------------------------------------------------------------------- @@ -460,22 +462,23 @@ BEGIN COALESCE(region_ids_filter, '[]')::jsonb @> to_jsonb(gr."regionId")::jsonb ) ) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM 
filtered_grants fgr - USING applied_filtered_out_grants afogr - WHERE fgr.id = afogr.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afogr + WHERE fgr.id = afogr.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -521,22 +524,23 @@ BEGIN WHERE 1 = 1 -- Continued Filter for group if ssdi.group is defined from left joined table above AND (group_filter IS NULL OR (g.id IS NOT NULL AND (gc.id IS NOT NULL OR g."sharedWith" = 'Everyone'))) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afogr - WHERE fgr.id = afogr.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afogr + WHERE fgr.id = afogr.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -547,24 +551,26 @@ BEGIN END IF; --------------------------------------------------------------------------------------------------- -- Step 2.1: Seed filtered_goals using filtered_grants - -- DROP TABLE IF EXISTS filtered_goals; + DROP TABLE IF EXISTS filtered_goals; CREATE TEMP TABLE IF NOT EXISTS filtered_goals (id INT); WITH seed_filtered_goals AS ( INSERT INTO filtered_goals (id) - SELECT DISTINCT g.id + SELECT + g.id FROM "Goals" g JOIN filtered_grants fgr ON g."grantId" = fgr.id WHERE g."deletedAt" IS NULL AND g."mapsToParentGoalId" IS NULL - ORDER BY g.id -- Add ORDER BY here + GROUP BY 1 + ORDER BY 1 RETURNING id ) INSERT INTO process_log (action, record_cnt) SELECT - 'Seed filtered_goals' AS action, - COUNT(*) + 'Seed filtered_goals' AS action, + COUNT(*) FROM seed_filtered_goals GROUP BY 1; 
--------------------------------------------------------------------------------------------------- @@ -576,7 +582,7 @@ BEGIN THEN WITH applied_filtered_goals AS ( - SELECT DISTINCT + SELECT g.id FROM filtered_goals fg JOIN "Goals" g @@ -613,38 +619,39 @@ BEGIN WHERE 1 = 1 -- Continued Filter for activityReportGoalResponse if ssdi.activityReportGoalResponse is defined, for array columns AND (activity_report_goal_response_filter IS NULL OR gfr.id IS NOT NULL) + GROUP BY 1 + ORDER BY 1 ), applied_filtered_out_goals AS ( - SELECT - fg.id - FROM filtered_goals fg - LEFT JOIN applied_filtered_goals afg - ON fg.id = afg.id - WHERE afg.id IS NULL - ORDER BY 1 + SELECT + fg.id + FROM filtered_goals fg + LEFT JOIN applied_filtered_goals afg ON fg.id = afg.id + GROUP BY 1 + HAVING COUNT(afg.id) = 0 + ORDER BY 1 ), delete_from_goal_filter AS ( - DELETE FROM filtered_goals fg - USING applied_filtered_out_goals afog - WHERE fg.id = afog.id - RETURNING fg.id + DELETE FROM filtered_goals fg + USING applied_filtered_out_goals afog + WHERE fg.id = afog.id + RETURNING fg.id ), applied_filtered_out_grants AS ( - SELECT - fgr.id - FROM filtered_grants fgr - LEFT JOIN "Goals" g - ON fgr.id = g."grantId" - LEFT JOIN filtered_goals fg - ON g.id = fg.id - WHERE fg.id IS NULL - ORDER BY 1 + SELECT + fgr.id + FROM filtered_grants fgr + LEFT JOIN "Goals" g ON fgr.id = g."grantId" + LEFT JOIN filtered_goals fg ON g.id = fg.id + GROUP BY 1 + HAVING COUNT(fg.id) = 0 + ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afog - WHERE fgr.id = afog.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afog + WHERE fgr.id = afog.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -661,31 +668,32 @@ BEGIN END IF; --------------------------------------------------------------------------------------------------- -- Step 3.1: Seed filterd_activity_reports - -- DROP TABLE IF 
EXISTS filtered_activity_reports; + DROP TABLE IF EXISTS filtered_activity_reports; CREATE TEMP TABLE IF NOT EXISTS filtered_activity_reports (id INT); WITH seed_filtered_activity_reports AS ( - INSERT INTO filtered_activity_reports (id) - SELECT DISTINCT - a.id - FROM "ActivityReports" a - JOIN "ActivityRecipients" ar - ON a.id = ar."activityReportId" - JOIN filtered_grants fgr - ON ar."grantId" = fgr.id - JOIN "ActivityReportGoals" arg - ON a.id = arg."activityReportId" - JOIN filtered_goals fg - ON arg."goalId" = fg.id - WHERE a."calculatedStatus" = 'approved' - ORDER BY a.id -- Add ORDER BY here - RETURNING + INSERT INTO filtered_activity_reports (id) + SELECT + a.id + FROM "ActivityReports" a + JOIN "ActivityRecipients" ar + ON a.id = ar."activityReportId" + JOIN filtered_grants fgr + ON ar."grantId" = fgr.id + JOIN "ActivityReportGoals" arg + ON a.id = arg."activityReportId" + JOIN filtered_goals fg + ON arg."goalId" = fg.id + WHERE a."calculatedStatus" = 'approved' + GROUP BY 1 + ORDER BY 1 + RETURNING id ) INSERT INTO process_log (action, record_cnt) SELECT - 'Seed filtered_activity_reports' AS action, - COUNT(*) + 'Seed filtered_activity_reports' AS action, + COUNT(*) FROM seed_filtered_activity_reports GROUP BY 1; --------------------------------------------------------------------------------------------------- @@ -782,55 +790,55 @@ BEGIN ) != tta_type_not_filter ) ) + GROUP BY 1 + ORDER BY 1 ), applied_filtered_out_activity_reports AS ( - SELECT - fa.id - FROM filtered_activity_reports fa - LEFT JOIN applied_filtered_activity_reports afa - ON fa.id = afa."activityReportId" - WHERE afa."activityReportId" IS NULL - ORDER BY 1 + SELECT + fa.id + FROM filtered_activity_reports fa + LEFT JOIN applied_filtered_activity_reports afa ON fa.id = afa."activityReportId" + GROUP BY 1 + HAVING COUNT(afa."activityReportId") = 0 + ORDER BY 1 ), delete_from_activity_report_filter AS ( - DELETE FROM filtered_activity_reports fa - USING 
applied_filtered_out_activity_reports afaoar - WHERE fa.id = afaoar.id - RETURNING fa.id + DELETE FROM filtered_activity_reports fa + USING applied_filtered_out_activity_reports afaoar + WHERE fa.id = afaoar.id + RETURNING fa.id ), applied_filtered_out_goals AS ( - SELECT - fg.id - FROM filtered_goals fg - LEFT JOIN "ActivityReportGoals" arg - ON fg.id = arg."goalId" - LEFT JOIN filtered_activity_reports fa - ON arg."activityReportId" = fa.id - WHERE fa.id IS NULL - ORDER BY 1 + SELECT + fg.id + FROM filtered_goals fg + LEFT JOIN "ActivityReportGoals" arg ON fg.id = arg."goalId" + LEFT JOIN filtered_activity_reports fa ON arg."activityReportId" = fa.id + GROUP BY 1 + HAVING COUNT(fa.id) = 0 + ORDER BY 1 ), delete_from_goal_filter AS ( - DELETE FROM filtered_goals fg - USING applied_filtered_out_goals afog - WHERE fg.id = afog.id - RETURNING fg.id + DELETE FROM filtered_goals fg + USING applied_filtered_out_goals afog + WHERE fg.id = afog.id + RETURNING fg.id ), applied_filtered_out_grants AS ( - SELECT - fgr.id - FROM filtered_grants fgr - LEFT JOIN "Goals" g - ON fgr.id = g."grantId" - LEFT JOIN filtered_goals fg - ON g.id = fg.id - WHERE fg.id IS NULL - ORDER BY 1 + SELECT + fgr.id + FROM filtered_grants fgr + LEFT JOIN "Goals" g ON fgr.id = g."grantId" + LEFT JOIN filtered_goals fg ON g.id = fg.id + GROUP BY 1 + HAVING COUNT(fg.id) = 0 + ORDER BY 1 ), delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afog - WHERE fgr.id = afog.id - RETURNING fgr.id + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afog + WHERE fgr.id = afog.id + RETURNING fgr.id ) INSERT INTO process_log (action, record_cnt) SELECT @@ -859,113 +867,113 @@ BEGIN THEN WITH applied_filtered_activity_reports AS ( - SELECT DISTINCT - a.id "activityReportId" - FROM filtered_activity_reports fa - JOIN "ActivityReports" a - ON fa.id = a.id - JOIN "ActivityReportGoals" arg - ON a.id = arg."activityReportId" - JOIN filtered_goals fg - 
ON arg."goalId" = fg.id - JOIN "ActivityReportObjectives" aro - ON a.id = aro."activityReportId" - JOIN "Objectives" o - ON aro."objectiveId" = o.id - AND arg."goalId" = o."goalId" - JOIN "ActivityReportObjectiveTopics" arot - ON aro.id = arot."activityReportObjectiveId" - JOIN "Topics" t - ON arot."topicId" = t.id - JOIN "NextSteps" ns - ON a.id = ns."activityReportId" - WHERE 1 = 1 - -- Filter for reportText if ssdi.reportText is defined - AND ( - report_text_filter IS NULL - OR ( - EXISTS ( - SELECT 1 - FROM json_array_elements_text(COALESCE(report_text_filter, '[]')::json) AS value - WHERE CONCAT(a.context, '\n', arg.name, '\n', aro.title, '\n', aro."ttaProvided", '\n', ns.note) ~* value::text - ) != report_text_not_filter + SELECT + a.id "activityReportId" + FROM filtered_activity_reports fa + JOIN "ActivityReports" a + ON fa.id = a.id + JOIN "ActivityReportGoals" arg + ON a.id = arg."activityReportId" + JOIN filtered_goals fg + ON arg."goalId" = fg.id + JOIN "ActivityReportObjectives" aro + ON a.id = aro."activityReportId" + JOIN "Objectives" o + ON aro."objectiveId" = o.id + AND arg."goalId" = o."goalId" + JOIN "ActivityReportObjectiveTopics" arot + ON aro.id = arot."activityReportObjectiveId" + JOIN "Topics" t + ON arot."topicId" = t.id + JOIN "NextSteps" ns + ON a.id = ns."activityReportId" + WHERE 1 = 1 + -- Filter for reportText if ssdi.reportText is defined + AND ( + report_text_filter IS NULL + OR ( + EXISTS ( + SELECT 1 + FROM json_array_elements_text(COALESCE(report_text_filter, '[]')::json) AS value + WHERE CONCAT(a.context, '\n', arg.name, '\n', aro.title, '\n', aro."ttaProvided", '\n', ns.note) ~* value::text + ) != report_text_not_filter + ) ) - ) - -- Filter for topic if ssdi.topic is defined - AND ( - topic_filter IS NULL - OR ( - COALESCE(topic_filter, '[]')::jsonb @> to_jsonb(t.name) != topic_not_filter + -- Filter for topic if ssdi.topic is defined + AND ( + topic_filter IS NULL + OR ( + COALESCE(topic_filter, '[]')::jsonb @> 
to_jsonb(t.name) != topic_not_filter + ) ) - ) + GROUP BY 1 + ORDER BY 1 ), - applied_filtered_out_activity_reports AS ( - SELECT - fa.id - FROM filtered_activity_reports fa - LEFT JOIN applied_filtered_activity_reports afa - ON fa.id = afa."activityReportId" - WHERE afa."activityReportId" IS NULL - ORDER BY 1 - ), - delete_from_activity_report_filter AS ( - DELETE FROM filtered_activity_reports fa - USING applied_filtered_out_activity_reports afaoar - WHERE fa.id = afaoar.id - RETURNING fa.id - ), - applied_filtered_out_goals AS ( - SELECT - fg.id - FROM filtered_goals fg - LEFT JOIN "ActivityReportGoals" arg - ON fg.id = arg."goalId" - LEFT JOIN filtered_activity_reports fa - ON arg."activityReportId" = fa.id - WHERE fa.id IS NULL - ORDER BY 1 - ), - delete_from_goal_filter AS ( - DELETE FROM filtered_goals fg - USING applied_filtered_out_goals afog - WHERE fg.id = afog.id - RETURNING fg.id - ), - applied_filtered_out_grants AS ( - SELECT - fgr.id - FROM filtered_grants fgr - LEFT JOIN "Goals" g - ON fgr.id = g."grantId" - LEFT JOIN filtered_goals fg - ON g.id = fg.id - WHERE fg.id IS NULL - ORDER BY 1 - ), - delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afog - WHERE fgr.id = afog.id - RETURNING fgr.id - ) - INSERT INTO process_log (action, record_cnt) - SELECT - 'Apply Activity Report Filters' AS action, - COUNT(*) - FROM delete_from_activity_report_filter - GROUP BY 1 - UNION - SELECT - 'Apply Activity Report Filters To Goals' AS action, - COUNT(*) - FROM delete_from_goal_filter - GROUP BY 1 - UNION - SELECT - 'Apply Activity Report Filters To Grants' AS action, - COUNT(*) - FROM delete_from_grant_filter - GROUP BY 1; + applied_filtered_out_activity_reports AS ( + SELECT + fa.id + FROM filtered_activity_reports fa + LEFT JOIN applied_filtered_activity_reports afa ON fa.id = afa."activityReportId" + GROUP BY 1 + HAVING COUNT(afa."activityReportId") = 0 + ORDER BY 1 + ), + delete_from_activity_report_filter AS 
( + DELETE FROM filtered_activity_reports fa + USING applied_filtered_out_activity_reports afaoar + WHERE fa.id = afaoar.id + RETURNING fa.id + ), + applied_filtered_out_goals AS ( + SELECT + fg.id + FROM filtered_goals fg + LEFT JOIN "ActivityReportGoals" arg ON fg.id = arg."goalId" + LEFT JOIN filtered_activity_reports fa ON arg."activityReportId" = fa.id + GROUP BY 1 + HAVING COUNT(fa.id) = 0 + ORDER BY 1 + ), + delete_from_goal_filter AS ( + DELETE FROM filtered_goals fg + USING applied_filtered_out_goals afog + WHERE fg.id = afog.id + RETURNING fg.id + ), + applied_filtered_out_grants AS ( + SELECT + fgr.id + FROM filtered_grants fgr + LEFT JOIN "Goals" g ON fgr.id = g."grantId" + LEFT JOIN filtered_goals fg ON g.id = fg.id + GROUP BY 1 + HAVING COUNT(fg.id) = 0 + ORDER BY 1 + ), + delete_from_grant_filter AS ( + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afog + WHERE fgr.id = afog.id + RETURNING fgr.id + ) + INSERT INTO process_log (action, record_cnt) + SELECT + 'Apply Activity Report Filters' AS action, + COUNT(*) + FROM delete_from_activity_report_filter + GROUP BY 1 + UNION + SELECT + 'Apply Activity Report Filters To Goals' AS action, + COUNT(*) + FROM delete_from_goal_filter + GROUP BY 1 + UNION + SELECT + 'Apply Activity Report Filters To Grants' AS action, + COUNT(*) + FROM delete_from_grant_filter + GROUP BY 1; END IF; --------------------------------------------------------------------------------------------------- -- Step 3.2: If activity reports filters (set 3), delete from filtered_activity_reports for any activity reports filtered, delete from filtered_goals using filterd_activity_reports, delete from filtered_grants using filtered_goals @@ -974,7 +982,7 @@ BEGIN THEN WITH applied_filtered_activity_reports AS ( - SELECT DISTINCT + SELECT a.id "activityReportId" FROM filtered_activity_reports fa JOIN "ActivityReports" a @@ -997,75 +1005,72 @@ BEGIN ('["single-recipient"]'::jsonb @> 
COALESCE(recipient_single_or_multi_filter, '[]')::jsonb) ) != recipient_single_or_multi_not_filter ) - ), - applied_filtered_out_activity_reports AS ( - SELECT - fa.id - FROM filtered_activity_reports fa - LEFT JOIN applied_filtered_activity_reports afa - ON fa.id = afa."activityReportId" - WHERE afa."activityReportId" IS NULL - ORDER BY 1 - ), - delete_from_activity_report_filter AS ( - DELETE FROM filtered_activity_reports fa - USING applied_filtered_out_activity_reports afaoar - WHERE fa.id = afaoar.id - RETURNING fa.id - ), - applied_filtered_out_goals AS ( - SELECT - fg.id - FROM filtered_goals fg - LEFT JOIN "ActivityReportGoals" arg - ON fg.id = arg."goalId" - LEFT JOIN filtered_activity_reports fa - ON arg."activityReportId" = fa.id - WHERE fa.id IS NULL - ORDER BY 1 - ), - delete_from_goal_filter AS ( - DELETE FROM filtered_goals fg - USING applied_filtered_out_goals afog - WHERE fg.id = afog.id - RETURNING fg.id - ), - applied_filtered_out_grants AS ( - SELECT - fgr.id - FROM filtered_grants fgr - LEFT JOIN "Goals" g - ON fgr.id = g."grantId" - LEFT JOIN filtered_goals fg - ON g.id = fg.id - WHERE fg.id IS NULL - ORDER BY 1 - ), - delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afog - WHERE fgr.id = afog.id - RETURNING fgr.id - ) - INSERT INTO process_log (action, record_cnt) - SELECT - 'Apply Activity Report Filters' AS action, - COUNT(*) - FROM delete_from_activity_report_filter - GROUP BY 1 - UNION - SELECT - 'Apply Activity Report Filters To Goals' AS action, - COUNT(*) - FROM delete_from_goal_filter - GROUP BY 1 - UNION - SELECT - 'Apply Activity Report Filters To Grants' AS action, - COUNT(*) - FROM delete_from_grant_filter - GROUP BY 1; + applied_filtered_out_activity_reports AS ( + SELECT + fa.id + FROM filtered_activity_reports fa + LEFT JOIN applied_filtered_activity_reports afa ON fa.id = afa."activityReportId" + GROUP BY 1 + HAVING COUNT(afa."activityReportId") = 0 + ORDER BY 1 + ), + 
delete_from_activity_report_filter AS ( + DELETE FROM filtered_activity_reports fa + USING applied_filtered_out_activity_reports afaoar + WHERE fa.id = afaoar.id + RETURNING fa.id + ), + applied_filtered_out_goals AS ( + SELECT + fg.id + FROM filtered_goals fg + LEFT JOIN "ActivityReportGoals" arg ON fg.id = arg."goalId" + LEFT JOIN filtered_activity_reports fa ON arg."activityReportId" = fa.id + GROUP BY 1 + HAVING COUNT(fa.id) = 0 + ORDER BY 1 + ), + delete_from_goal_filter AS ( + DELETE FROM filtered_goals fg + USING applied_filtered_out_goals afog + WHERE fg.id = afog.id + RETURNING fg.id + ), + applied_filtered_out_grants AS ( + SELECT + fgr.id + FROM filtered_grants fgr + LEFT JOIN "Goals" g ON fgr.id = g."grantId" + LEFT JOIN filtered_goals fg ON g.id = fg.id + GROUP BY 1 + HAVING COUNT(fg.id) = 0 + ORDER BY 1 + ), + delete_from_grant_filter AS ( + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afog + WHERE fgr.id = afog.id + RETURNING fgr.id + ) + INSERT INTO process_log (action, record_cnt) + SELECT + 'Apply Activity Report Filters' AS action, + COUNT(*) + FROM delete_from_activity_report_filter + GROUP BY 1 + UNION + SELECT + 'Apply Activity Report Filters To Goals' AS action, + COUNT(*) + FROM delete_from_goal_filter + GROUP BY 1 + UNION + SELECT + 'Apply Activity Report Filters To Grants' AS action, + COUNT(*) + FROM delete_from_grant_filter + GROUP BY 1; END IF; --------------------------------------------------------------------------------------------------- -- Step 3.2: If activity reports filters (set 3), delete from filtered_activity_reports for any activity reports filtered, delete from filtered_goals using filterd_activity_reports, delete from filtered_grants using filtered_goals @@ -1074,7 +1079,7 @@ BEGIN THEN WITH applied_filtered_activity_reports AS ( - SELECT DISTINCT + SELECT a.id "activityReportId" FROM filtered_activity_reports fa JOIN "ActivityReports" a @@ -1099,74 +1104,73 @@ BEGIN ) != roles_not_filter ) GROUP BY 
1 + ORDER BY 1 ), - applied_filtered_out_activity_reports AS ( - SELECT - fa.id - FROM filtered_activity_reports fa - LEFT JOIN applied_filtered_activity_reports afa - ON fa.id = afa."activityReportId" - WHERE afa."activityReportId" IS NULL - ORDER BY 1 - ), - delete_from_activity_report_filter AS ( - DELETE FROM filtered_activity_reports fa - USING applied_filtered_out_activity_reports afaoar - WHERE fa.id = afaoar.id - RETURNING fa.id - ), - applied_filtered_out_goals AS ( - SELECT - fg.id - FROM filtered_goals fg - LEFT JOIN "ActivityReportGoals" arg - ON fg.id = arg."goalId" - LEFT JOIN filtered_activity_reports fa - ON arg."activityReportId" = fa.id - WHERE fa.id IS NULL - ORDER BY 1 - ), - delete_from_goal_filter AS ( - DELETE FROM filtered_goals fg - USING applied_filtered_out_goals afog - WHERE fg.id = afog.id - RETURNING fg.id - ), - applied_filtered_out_grants AS ( - SELECT - fgr.id - FROM filtered_grants fgr - LEFT JOIN "Goals" g - ON fgr.id = g."grantId" - LEFT JOIN filtered_goals fg - ON g.id = fg.id - WHERE fg.id IS NULL - ORDER BY 1 - ), - delete_from_grant_filter AS ( - DELETE FROM filtered_grants fgr - USING applied_filtered_out_grants afog - WHERE fgr.id = afog.id - RETURNING fgr.id - ) - INSERT INTO process_log (action, record_cnt) - SELECT - 'Apply Activity Report Filters' AS action, - COUNT(*) - FROM delete_from_activity_report_filter - GROUP BY 1 - UNION - SELECT - 'Apply Activity Report Filters To Goals' AS action, - COUNT(*) - FROM delete_from_goal_filter - GROUP BY 1 - UNION - SELECT - 'Apply Activity Report Filters To Grants' AS action, - COUNT(*) - FROM delete_from_grant_filter - GROUP BY 1; + applied_filtered_out_activity_reports AS ( + SELECT + fa.id + FROM filtered_activity_reports fa + LEFT JOIN applied_filtered_activity_reports afa ON fa.id = afa."activityReportId" + GROUP BY 1 + HAVING COUNT(afa."activityReportId") = 0 + ORDER BY 1 + ), + delete_from_activity_report_filter AS ( + DELETE FROM filtered_activity_reports fa + USING 
applied_filtered_out_activity_reports afaoar + WHERE fa.id = afaoar.id + RETURNING fa.id + ), + applied_filtered_out_goals AS ( + SELECT + fg.id + FROM filtered_goals fg + LEFT JOIN "ActivityReportGoals" arg ON fg.id = arg."goalId" + LEFT JOIN filtered_activity_reports fa ON arg."activityReportId" = fa.id + GROUP BY 1 + HAVING COUNT(fa.id) = 0 + ORDER BY 1 + ), + delete_from_goal_filter AS ( + DELETE FROM filtered_goals fg + USING applied_filtered_out_goals afog + WHERE fg.id = afog.id + RETURNING fg.id + ), + applied_filtered_out_grants AS ( + SELECT + fgr.id + FROM filtered_grants fgr + LEFT JOIN "Goals" g ON fgr.id = g."grantId" + LEFT JOIN filtered_goals fg ON g.id = fg.id + GROUP BY 1 + HAVING COUNT(fg.id) = 0 + ORDER BY 1 + ), + delete_from_grant_filter AS ( + DELETE FROM filtered_grants fgr + USING applied_filtered_out_grants afog + WHERE fgr.id = afog.id + RETURNING fgr.id + ) + INSERT INTO process_log (action, record_cnt) + SELECT + 'Apply Activity Report Filters' AS action, + COUNT(*) + FROM delete_from_activity_report_filter + GROUP BY 1 + UNION + SELECT + 'Apply Activity Report Filters To Goals' AS action, + COUNT(*) + FROM delete_from_goal_filter + GROUP BY 1 + UNION + SELECT + 'Apply Activity Report Filters To Grants' AS action, + COUNT(*) + FROM delete_from_grant_filter + GROUP BY 1; END IF; --EXCEPTION diff --git a/src/queries/api/dashboards/qa/fei.sql b/src/queries/api/dashboards/qa/fei.sql index 34bb2740b2..2f663d136d 100644 --- a/src/queries/api/dashboards/qa/fei.sql +++ b/src/queries/api/dashboards/qa/fei.sql @@ -285,7 +285,7 @@ DECLARE BEGIN --------------------------------------------------------------------------------------------------- -- Step 0.1: make a table to hold applied filters - -- DROP TABLE IF EXISTS process_log; + DROP TABLE IF EXISTS process_log; CREATE TEMP TABLE IF NOT EXISTS process_log( action TEXT, record_cnt int, @@ -293,14 +293,16 @@ BEGIN ); 
--------------------------------------------------------------------------------------------------- -- Step 1.1: Seed filtered_grants - -- DROP TABLE IF EXISTS filtered_grants; + DROP TABLE IF EXISTS filtered_grants; CREATE TEMP TABLE IF NOT EXISTS filtered_grants (id INT); WITH seed_filtered_grants AS ( INSERT INTO filtered_grants (id) - SELECT DISTINCT id + SELECT + id FROM "Grants" WHERE COALESCE(deleted, false) = false + GROUP BY 1 ORDER BY 1 RETURNING id ) @@ -375,15 +377,16 @@ BEGIN COALESCE(region_ids_filter, '[]')::jsonb @> to_jsonb(gr."regionId")::jsonb ) ) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( @@ -436,15 +439,16 @@ BEGIN WHERE 1 = 1 -- Continued Filter for group if ssdi.group is defined from left joined table above AND (group_filter IS NULL OR (g.id IS NOT NULL AND (gc.id IS NOT NULL OR g."sharedWith" = 'Everyone'))) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( SELECT fgr.id FROM filtered_grants fgr - LEFT JOIN applied_filtered_grants afgr - ON fgr.id = afgr.id - WHERE afgr.id IS NULL + LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( @@ -467,13 +471,15 @@ BEGIN WITH seed_filtered_goals AS ( INSERT INTO filtered_goals (id) - SELECT DISTINCT g.id + SELECT + g.id FROM "Goals" g JOIN filtered_grants fgr ON g."grantId" = fgr.id WHERE g."deletedAt" IS NULL AND g."mapsToParentGoalId" IS NULL - ORDER BY g.id -- Add ORDER BY here + GROUP BY 1 + ORDER BY 1 RETURNING id ) INSERT INTO process_log (action, record_cnt) @@ -491,7 +497,7 @@ BEGIN THEN WITH applied_filtered_goals AS ( - SELECT DISTINCT + SELECT g.id FROM filtered_goals fg JOIN "Goals" g @@ -538,12 +544,16 @@ 
BEGIN WHERE 1 = 1 -- Continued Filter for activityReportGoalResponse if ssdi.activityReportGoalResponse is defined, for array columns AND (activity_report_goal_response_filter IS NULL OR gfr.id IS NOT NULL) + GROUP BY 1 + ORDER BY 1 ), applied_filtered_out_goals AS ( - SELECT fg.id + SELECT + fg.id FROM filtered_goals fg LEFT JOIN applied_filtered_goals afg ON fg.id = afg.id - WHERE afg.id IS NULL + GROUP BY 1 + HAVING COUNT(afg.id) = 0 ORDER BY 1 ), delete_from_goal_filter AS ( @@ -553,11 +563,13 @@ BEGIN RETURNING fg.id ), applied_filtered_out_grants AS ( - SELECT fgr.id + SELECT + fgr.id FROM filtered_grants fgr LEFT JOIN "Goals" g ON fgr.id = g."grantId" LEFT JOIN filtered_goals fg ON g.id = fg.id - WHERE fg.id IS NULL + GROUP BY 1 + HAVING COUNT(fg.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( diff --git a/src/queries/api/dashboards/qa/no-tta.sql b/src/queries/api/dashboards/qa/no-tta.sql index 645dd009ba..8416ff41cf 100644 --- a/src/queries/api/dashboards/qa/no-tta.sql +++ b/src/queries/api/dashboards/qa/no-tta.sql @@ -221,7 +221,7 @@ DECLARE BEGIN --------------------------------------------------------------------------------------------------- -- Step 0.1: make a table to hold applied filters - -- DROP TABLE IF EXISTS process_log; + DROP TABLE IF EXISTS process_log; CREATE TEMP TABLE IF NOT EXISTS process_log( action TEXT, record_cnt int, @@ -230,14 +230,16 @@ BEGIN --------------------------------------------------------------------------------------------------- -- Step 1.1: Seed filtered_grants - -- DROP TABLE IF EXISTS filtered_grants; + DROP TABLE IF EXISTS filtered_grants; CREATE TEMP TABLE IF NOT EXISTS filtered_grants (id INT); WITH seed_filtered_grants AS ( INSERT INTO filtered_grants (id) - SELECT DISTINCT id + SELECT + id FROM "Grants" WHERE COALESCE(deleted, false) = false + GROUP BY 1 ORDER BY 1 RETURNING id ) @@ -299,13 +301,16 @@ BEGIN region_ids_filter IS NULL OR COALESCE(region_ids_filter, '[]')::jsonb @> to_jsonb(gr."regionId") != 
region_ids_not_filter ) + GROUP BY 1 ORDER BY 1 ), applied_filtered_out_grants AS ( - SELECT fgr.id + SELECT + fgr.id FROM filtered_grants fgr LEFT JOIN applied_filtered_grants afgr ON fgr.id = afgr.id - WHERE afgr.id IS NULL + GROUP BY 1 + HAVING COUNT(afgr.id) = 0 ORDER BY 1 ), delete_from_grant_filter AS ( @@ -322,19 +327,21 @@ BEGIN --------------------------------------------------------------------------------------------------- -- Step 3.1: Seed filtered_activity_reports - -- DROP TABLE IF EXISTS filtered_activity_reports; + DROP TABLE IF EXISTS filtered_activity_reports; CREATE TEMP TABLE IF NOT EXISTS filtered_activity_reports (id INT); WITH seed_filtered_activity_reports AS ( INSERT INTO filtered_activity_reports (id) - SELECT DISTINCT a.id + SELECT + a.id FROM "ActivityReports" a JOIN "ActivityRecipients" ar ON a.id = ar."activityReportId" JOIN filtered_grants fgr ON ar."grantId" = fgr.id JOIN "ActivityReportGoals" arg ON a.id = arg."activityReportId" --JOIN filtered_goals fg ON arg."goalId" = fg.id WHERE a."calculatedStatus" = 'approved' - ORDER BY a.id + GROUP BY 1 + ORDER BY 1 RETURNING id ) INSERT INTO process_log (action, record_cnt) @@ -350,7 +357,8 @@ BEGIN THEN WITH applied_filtered_activity_reports AS ( - SELECT a.id + SELECT + a.id FROM filtered_activity_reports fa JOIN "ActivityReports" a ON fa.id = a.id WHERE a."calculatedStatus" = 'approved' @@ -376,12 +384,16 @@ BEGIN FROM json_array_elements_text(COALESCE(end_date_filter, '[]')::json) AS value ) != end_date_not_filter ) + GROUP BY 1 + ORDER BY 1 ), applied_filtered_out_activity_reports AS ( - SELECT fa.id + SELECT + fa.id FROM filtered_activity_reports fa LEFT JOIN applied_filtered_activity_reports afa ON fa.id = afa.id - WHERE afa.id IS NULL + GROUP BY 1 + HAVING COUNT(afa.id) = 0 ORDER BY 1 ), delete_from_activity_report_filter AS ( @@ -399,15 +411,20 @@ BEGIN --------------------------------------------------------------------------------------------------- -- Step 3.3: Update 
filtered_grants based on the reduced filtered_activity_reports dataset WITH reduced_grants AS ( - SELECT DISTINCT ar."grantId" + SELECT + ar."grantId" FROM filtered_activity_reports fa JOIN "ActivityRecipients" ar ON fa.id = ar."activityReportId" + GROUP BY 1 + ORDER BY 1 ), applied_filtered_out_grants AS ( - SELECT fgr.id + SELECT + fgr.id FROM filtered_grants fgr LEFT JOIN reduced_grants rg ON fgr.id = rg."grantId" - WHERE rg."grantId" IS NULL + GROUP BY 1 + HAVING COUNT(rg."grantId") = 0 ORDER BY 1 ), delete_from_grant_filter AS ( @@ -426,20 +443,21 @@ END $$; --------------------------------------------------------------------------------------------------- -- Final CTEs for dataset generation WITH active_filters_array AS ( - SELECT array_remove(ARRAY[ - CASE WHEN NULLIF(current_setting('ssdi.recipients', true), '') IS NOT NULL THEN 'recipients' END, - CASE WHEN NULLIF(current_setting('ssdi.programType', true), '') IS NOT NULL THEN 'programType' END, - CASE WHEN NULLIF(current_setting('ssdi.grantNumber', true), '') IS NOT NULL THEN 'grantNumber' END, - CASE WHEN NULLIF(current_setting('ssdi.stateCode', true), '') IS NOT NULL THEN 'stateCode' END, - CASE WHEN NULLIF(current_setting('ssdi.region', true), '') IS NOT NULL THEN 'region' END, - CASE WHEN NULLIF(current_setting('ssdi.startDate', true), '') IS NOT NULL THEN 'startDate' END, - CASE WHEN NULLIF(current_setting('ssdi.endDate', true), '') IS NOT NULL THEN 'endDate' END - ], NULL) AS active_filters + SELECT array_remove(ARRAY[ + CASE WHEN NULLIF(current_setting('ssdi.recipients', true), '') IS NOT NULL THEN 'recipients' END, + CASE WHEN NULLIF(current_setting('ssdi.programType', true), '') IS NOT NULL THEN 'programType' END, + CASE WHEN NULLIF(current_setting('ssdi.grantNumber', true), '') IS NOT NULL THEN 'grantNumber' END, + CASE WHEN NULLIF(current_setting('ssdi.stateCode', true), '') IS NOT NULL THEN 'stateCode' END, + CASE WHEN NULLIF(current_setting('ssdi.region', true), '') IS NOT NULL THEN 'region' 
END, + CASE WHEN NULLIF(current_setting('ssdi.startDate', true), '') IS NOT NULL THEN 'startDate' END, + CASE WHEN NULLIF(current_setting('ssdi.endDate', true), '') IS NOT NULL THEN 'endDate' END + ], NULL) AS active_filters ), no_tta AS ( - SELECT DISTINCT r.id, - COUNT(DISTINCT a.id) != 0 OR COUNT(DISTINCT srp.id) != 0 AS has_tta + SELECT + r.id, + COUNT(DISTINCT a.id) != 0 OR COUNT(DISTINCT srp.id) != 0 AS has_tta FROM "Recipients" r JOIN "Grants" gr ON r.id = gr."recipientId" JOIN filtered_grants fgr ON gr.id = fgr.id @@ -457,6 +475,7 @@ no_tta AS ( AND (srp.data ->> 'endDate')::DATE > now() - INTERVAL '90 days' WHERE gr.status = 'Active' GROUP BY 1 + ORDER BY 1 ), no_tta_widget AS ( SELECT @@ -481,6 +500,7 @@ no_tta_page AS ( AND a."calculatedStatus" = 'approved' WHERE gr.status = 'Active' GROUP BY 1,2,3 + ORDER BY 1 ), datasets AS ( SELECT 'no_tta_widget' data_set, COUNT(*) records, diff --git a/src/queries/dataRequests/user/communication-logs.sql b/src/queries/dataRequests/user/communication-logs.sql index 7c7a81c1b3..5e1d198e66 100644 --- a/src/queries/dataRequests/user/communication-logs.sql +++ b/src/queries/dataRequests/user/communication-logs.sql @@ -169,14 +169,7 @@ SELECT COALESCE(cl.data ->> 'purpose', '') AS "purpose", COALESCE(cl.data ->> 'duration', '') AS "duration", COALESCE(cl.data ->> 'regionId', '') AS "region", - CASE - WHEN data ->> 'communicationDate' ~ '^[0-9]{1,2}/[0-9]{1,2}/[0-9]{4}$' THEN TO_DATE(data ->> 'communicationDate', 'MM/DD/YYYY') - WHEN data ->> 'communicationDate' ~ '^[0-9]{1,2}/[0-9]{1,2}/[0-9]{2}$' THEN TO_DATE(data ->> 'communicationDate', 'MM/DD/YY') - WHEN data ->> 'communicationDate' ~ '^[0-9]{1,2}-[0-9]{1,2}-[0-9]{2}$' THEN TO_DATE(data ->> 'communicationDate', 'MM-DD-YY') - WHEN data ->> 'communicationDate' ~ '^[0-9]{1,2}/[0-9]{1,2}//[0-9]{2}$' THEN TO_DATE(regexp_replace(data ->> 'communicationDate', '//', '/'), 'MM/DD/YY') - WHEN data ->> 'communicationDate' ~ 
'^[0-9]{1,2}/[0-9]{1,2}/[0-9]{4}?[0-9]{1,2}.[0-9]{1,2}$' THEN TO_DATE(LEFT(data ->> 'communicationDate', 10), 'MM/DD/YYYY') - ELSE NULL - END AS "communicationDate", + TO_DATE(data ->> 'communicationDate', 'MM/DD/YYYY') "communicationDate", COALESCE(cl.data ->> 'pocComplete', '') AS "pocComplete", COALESCE(cl.data ->> 'notes', '') AS "notes", COALESCE(( diff --git a/src/routes/recipient/handlers.js b/src/routes/recipient/handlers.js index 08cae4288c..d18419b074 100644 --- a/src/routes/recipient/handlers.js +++ b/src/routes/recipient/handlers.js @@ -124,7 +124,11 @@ export async function getGoalsByRecipient(req, res) { const { recipientId, regionId } = req.params; // Get goals for recipient. - const recipientGoals = await getGoalsByActivityRecipient(recipientId, regionId, req.query); + const recipientGoals = await getGoalsByActivityRecipient( + recipientId, + regionId, + req.query, + ); res.json(recipientGoals); } catch (error) { await handleErrors(req, res, error, logContext); diff --git a/src/services/event.ts b/src/services/event.ts index c4d86ef2ea..d59cc38d29 100644 --- a/src/services/event.ts +++ b/src/services/event.ts @@ -1,7 +1,6 @@ /* eslint-disable max-len */ import { Op, cast, WhereOptions as SequelizeWhereOptions } from 'sequelize'; import parse from 'csv-parse/lib/sync'; -import { get } from 'lodash'; import { TRAINING_REPORT_STATUSES as TRS, REASONS, @@ -405,6 +404,9 @@ const checkSessionForCompletion = ( checker: TChecker, missingSessionInfo: TRAlertShape[], ) => { + // this checks to see if the session has been completed + // with a lookup in the form data + // by the owner or the poc (depending on the checker parameter) const sessionValid = !!(session.data[checker]); if (!sessionValid) { @@ -418,8 +420,8 @@ const checkSessionForCompletion = ( ownerId: event.ownerId, pocIds: event.pocIds, collaboratorIds: event.collaboratorIds, - endDate: session.data.startDate, - startDate: session.data.endDate, + endDate: session.data.endDate, + startDate: 
session.data.startDate, sessionId: session.id, eventStatus: event.data.status, }); @@ -461,6 +463,9 @@ export async function getTrainingReportAlerts( const today = moment().startOf('day'); + // the following three filters are used to determine if the user is the owner, collaborator, or poc + // or if there is no user, in which case the alert is triggered for everyone + // this handles both cases: the alerts table in the UI and the email alerts for a given day const ownerUserIdFilter = (event: EventShape, user: number | undefined) => { if (!user || event.ownerId === user) { return true; diff --git a/src/services/recipient.js b/src/services/recipient.js index 5ea2a9ac68..937dc9cf7b 100644 --- a/src/services/recipient.js +++ b/src/services/recipient.js @@ -37,7 +37,6 @@ import filtersToScopes, { mergeIncludes } from '../scopes'; import orderGoalsBy from '../lib/orderGoalsBy'; import goalStatusByGoalName from '../widgets/goalStatusByGoalName'; import { - findOrFailExistingGoal, responsesForComparison, } from '../goalServices/helpers'; import getCachedResponse from '../lib/cache'; @@ -796,48 +795,18 @@ export async function getGoalsByActivityRecipient( ]; } - sorted = sorted.map((goal) => { - if (goal.goalCollaborators.length === 0) return goal; - - // eslint-disable-next-line no-param-reassign - goal.collaborators = createCollaborators(goal); - - return goal; - }); - - const r = sorted.reduce((previous, current) => { - const existingGoal = findOrFailExistingGoal(current, previous.goalRows); - + // map the goals to the format we need + const r = sorted.map((current) => { allGoalIds.push(current.id); + if (current.goalCollaborators.length > 0) { + // eslint-disable-next-line no-param-reassign + current.collaborators = createCollaborators(current); + } + const isCurated = current.goalTemplate && current.goalTemplate.creationMethod === CREATION_METHOD.CURATED; - if (existingGoal) { - existingGoal.ids = [...existingGoal.ids, current.id]; - existingGoal.goalNumbers = 
[...existingGoal.goalNumbers, current.goalNumber]; - existingGoal.grantNumbers = uniq([...existingGoal.grantNumbers, current.grant.number]); - existingGoal.objectives = reduceObjectivesForRecipientRecord( - current, - existingGoal, - existingGoal.grantNumbers, - ); - existingGoal.objectiveCount = existingGoal.objectives.length; - existingGoal.isCurated = isCurated || existingGoal.isCurated; - existingGoal.collaborators = existingGoal.collaborators || []; - existingGoal.collaborators = uniqBy([ - ...existingGoal.collaborators, - ...createCollaborators(current), - ], 'goalCreatorName'); - - existingGoal.onAR = existingGoal.onAR || current.onAR; - existingGoal.isReopenedGoal = existingGoal.isReopenedGoal || wasGoalPreviouslyClosed(current); - - return { - goalRows: previous.goalRows, - }; - } - const goalToAdd = { id: current.id, ids: [current.id], @@ -873,11 +842,7 @@ export async function getGoalsByActivityRecipient( goalToAdd.objectiveCount = goalToAdd.objectives.length; - return { - goalRows: [...previous.goalRows, goalToAdd], - }; - }, { - goalRows: [], + return goalToAdd; }); const statuses = await goalStatusByGoalName({ @@ -887,7 +852,7 @@ export async function getGoalsByActivityRecipient( }); // For checkbox selection we only need the primary goal id. 
- const rolledUpGoalIds = r.goalRows.map((goal) => { + const rolledUpGoalIds = r.map((goal) => { const bucket = { id: goal.id, goalIds: goal.ids, @@ -897,16 +862,16 @@ export async function getGoalsByActivityRecipient( if (limitNum) { return { - count: r.goalRows.length, - goalRows: r.goalRows.slice(offSetNum, offSetNum + limitNum), + count: r.length, + goalRows: r.slice(offSetNum, offSetNum + limitNum), statuses, allGoalIds: rolledUpGoalIds, }; } return { - count: r.goalRows.length, - goalRows: r.goalRows.slice(offSetNum), + count: r.length, + goalRows: r.slice(offSetNum), statuses, allGoalIds: rolledUpGoalIds, }; diff --git a/src/services/recipient.test.js b/src/services/recipient.test.js index 88ff32b33c..7e3f2128a5 100644 --- a/src/services/recipient.test.js +++ b/src/services/recipient.test.js @@ -781,7 +781,7 @@ describe('Recipient DB service', () => { }); const goal1 = await Goal.create({ - name: goal.name, + name: 'Sample goal 1', status: goal.status, grantId: grant.id, onApprovedAR: true, @@ -789,7 +789,7 @@ describe('Recipient DB service', () => { }); const goal2 = await Goal.create({ - name: goal.name, + name: 'Sample goal 2', status: goal.status, grantId: grant.id, onApprovedAR: true, @@ -797,7 +797,7 @@ describe('Recipient DB service', () => { }); const goal3 = await Goal.create({ - name: goal.name, + name: 'Sample goal 3', status: goal.status, grantId: grant.id, onApprovedAR: true, @@ -805,7 +805,7 @@ describe('Recipient DB service', () => { }); const goal4 = await Goal.create({ - name: goal.name, + name: 'Sample goal 4', status: goal.status, grantId: grant.id, onApprovedAR: true, @@ -813,7 +813,7 @@ describe('Recipient DB service', () => { }); const feiGoal = await Goal.create({ - name: goal2Info.name, + name: 'Sample goal FEI 1', status: goal2Info.status, grantId: grant.id, onApprovedAR: true, @@ -904,33 +904,54 @@ describe('Recipient DB service', () => { }); }); - it('properly de-duplicates based on responses', async () => { + it('maintains 
separate goals for different responses', async () => { const { goalRows, allGoalIds } = await getGoalsByActivityRecipient(recipient.id, region, {}); - expect(goalRows.length).toBe(4); - expect(allGoalIds.length).toBe(4); + expect(goalRows.length).toBe(5); + expect(allGoalIds.length).toBe(5); - const goalWithMultipleIds = allGoalIds.find((g) => g.id === goals[2].id); - expect(goalWithMultipleIds).not.toBeNull(); - expect(goalWithMultipleIds.goalIds).toStrictEqual([goals[2].id, goals[1].id]); + // Assert every goal has its own entry. + const goal1 = goalRows.find((r) => r.ids.includes(goals[0].id)); + expect(goal1).toBeTruthy(); + expect(goal1.ids.length).toBe(1); - const doubler = goalRows.find((r) => r.responsesForComparison === 'not sure,dont have to'); - expect(doubler).toBeTruthy(); + const goal2 = goalRows.find((r) => r.ids.includes(goals[1].id)); + expect(goal2).toBeTruthy(); - expect(doubler.ids.length).toBe(2); + const goal3 = goalRows.find((r) => r.ids.includes(goals[2].id)); + expect(goal3).toBeTruthy(); - const singler = goalRows.find((r) => r.responsesForComparison === 'gotta'); - expect(singler).toBeTruthy(); - expect(singler.ids.length).toBe(1); + const goal4 = goalRows.find((r) => r.ids.includes(goals[3].id)); + expect(goal4).toBeTruthy(); - const noResponse = goalRows.find((r) => r.responsesForComparison === ''); - expect(noResponse).toBeTruthy(); - expect(noResponse.ids.length).toBe(1); + const feiGoal = goalRows.find((r) => r.ids.includes(goals[4].id)); + expect(feiGoal).toBeTruthy(); + + // Assert every response has its own entry. 
+ const gottaResponse = goalRows.find((r) => r.id === goals[0].id); + expect(gottaResponse).toBeTruthy(); + expect(gottaResponse.responsesForComparison).toBe('gotta'); + + const notSureResponse = goalRows.find((r) => r.id === goals[1].id); + expect(notSureResponse).toBeTruthy(); + expect(notSureResponse.responsesForComparison).toBe('not sure,dont have to'); + + const notSureResponse2 = goalRows.find((r) => r.id === goals[2].id); + expect(notSureResponse2).toBeTruthy(); + expect(notSureResponse2.responsesForComparison).toBe('not sure,dont have to'); + + const notSureResponse3 = goalRows.find((r) => r.id === goals[3].id); + expect(notSureResponse3).toBeTruthy(); + expect(notSureResponse3.responsesForComparison).toBe(''); + + const feiResponse = goalRows.find((r) => r.id === goals[4].id); + expect(feiResponse).toBeTruthy(); + expect(feiResponse.responsesForComparison).toBe('fei response 1,fei response 2'); }); it('properly marks is fei goal', async () => { const { goalRows, allGoalIds } = await getGoalsByActivityRecipient(recipient.id, region, {}); - expect(goalRows.length).toBe(4); - expect(allGoalIds.length).toBe(4); + expect(goalRows.length).toBe(5); + expect(allGoalIds.length).toBe(5); // From goal Rows get goal 1. 
const goal1 = goalRows.find((r) => r.ids.includes(goals[0].id)); @@ -953,19 +974,28 @@ describe('Recipient DB service', () => { expect(feiGoal.isFei).toBe(true); }); - it('properly combines the same goals with no creators/collaborators', async () => { + it('keeps goals separated by goal text when they share the same grant with no creators/collaborators', async () => { // Remove other goals - goals[0].destroy(); - goals[3].destroy(); - goals[4].destroy(); - const { goalRows, allGoalIds } = await getGoalsByActivityRecipient(recipient.id, region, {}); - expect(goalRows.length).toBe(1); - expect(allGoalIds.length).toBe(1); - // Verify goal 2 and 3 have empty creators/collaborators - expect(goalRows[0].collaborators[0].goalCreator).toBe(undefined); - // Verify goal 2 and 3 are rolled up - expect(goalRows[0].ids.length).toBe(2); + expect(goalRows.length).toBe(5); + expect(allGoalIds.length).toBe(5); + + // Verify we have all four goals. + const goal1 = goalRows.find((r) => r.ids.includes(goals[0].id)); + expect(goal1).toBeTruthy(); + + const goal2 = goalRows.find((r) => r.ids.includes(goals[1].id)); + expect(goal2).toBeTruthy(); + + const goal3 = goalRows.find((r) => r.ids.includes(goals[2].id)); + expect(goal3).toBeTruthy(); + + const goal4 = goalRows.find((r) => r.ids.includes(goals[3].id)); + expect(goal4).toBeTruthy(); + + // Verify FEI + const feiGoal = goalRows.find((r) => r.ids.includes(goals[4].id)); + expect(feiGoal).toBeTruthy(); }); }); @@ -1031,10 +1061,11 @@ describe('Recipient DB service', () => { const reason = faker.animal.cetacean(); topics = await Topic.bulkCreate([ - { name: `${faker.company.bsNoun()} ${faker.company.bsNoun()}` }, - { name: `${faker.company.bsNoun()} ${faker.company.bsNoun()}` }, - { name: `${faker.company.bsNoun()} ${faker.company.bsNoun()}` }, - { name: `${faker.company.bsNoun()} ${faker.company.bsNoun()}` }, + { name: 'Topic for Objective 1 a' }, + { name: 'Topic for Objective 1 b' }, + { name: 'Topic for Objective 1 c' }, + { name: 
'Topic for Objective 2 b' }, + { name: 'Report Level Topic' }, ]); report = await createReport({ @@ -1045,7 +1076,7 @@ describe('Recipient DB service', () => { ], reason: [reason], calculatedStatus: REPORT_STATUSES.APPROVED, - topics: [topics[0].name], + topics: [topics[4].name], regionId: grant.regionId, }); @@ -1054,12 +1085,23 @@ describe('Recipient DB service', () => { objectiveId: o.id, }))); - await Promise.all((aros.map((aro) => ActivityReportObjectiveTopic.bulkCreate( - topics.map((t) => ({ - activityReportObjectiveId: aro.id, - topicId: t.id, - })), - ))).flat()); + // Disperse topics over objectives to make sure we don't lose any. + await ActivityReportObjectiveTopic.create({ + activityReportObjectiveId: aros[0].id, + topicId: topics[0].id, + }); + await ActivityReportObjectiveTopic.create({ + activityReportObjectiveId: aros[0].id, + topicId: topics[1].id, + }); + await ActivityReportObjectiveTopic.create({ + activityReportObjectiveId: aros[1].id, + topicId: topics[2].id, + }); + await ActivityReportObjectiveTopic.create({ + activityReportObjectiveId: aros[2].id, + topicId: topics[3].id, + }); await ActivityReportGoal.create({ activityReportId: report.id, @@ -1122,20 +1164,54 @@ describe('Recipient DB service', () => { }); }); - it('successfully reduces data without losing topics', async () => { + it('successfully maintains two goals without losing topics', async () => { const goalsForRecord = await getGoalsByActivityRecipient(recipient.id, grant.regionId, {}); - expect(goalsForRecord.count).toBe(1); - expect(goalsForRecord.goalRows.length).toBe(1); - expect(goalsForRecord.allGoalIds.length).toBe(1); + // Assert counts. + expect(goalsForRecord.count).toBe(2); + expect(goalsForRecord.goalRows.length).toBe(2); + expect(goalsForRecord.allGoalIds.length).toBe(2); + + // Select the first goal by goal id. 
+ const goal = goalsForRecord.goalRows.find((g) => g.id === goals[0].id); + expect(goal).toBeTruthy(); - expect(goalsForRecord.goalRows.flatMap((g) => g.goalTopics)).toHaveLength(4); - const goal = goalsForRecord.goalRows[0]; + // Assert the goal has the correct number of objectives. expect(goal.objectives.length).toBe(1); - const objective = goal.objectives[0]; - expect(objective.ids).toHaveLength(3); - expect(objective.ids.every(Boolean)).toBeTruthy(); - expect(objective.topics.length).toBe(4); + + // Assert objective text and status. + expect(goal.objectives[0].title).toBe(objectives[0].title); + expect(goal.objectives[0].status).toBe(objectives[0].status); + expect(goal.objectives[0].title).toBe(objectives[1].title); + expect(goal.objectives[0].status).toBe(objectives[1].status); + + // Assert the goal has the correct number of topics. + expect(goal.goalTopics.length).toBe(4); + + // Assert topic names. + expect(goal.objectives[0].topics).toEqual( + expect.arrayContaining([topics[0].name, topics[1].name, topics[2].name, topics[4].name]), + ); + + // Assert the second goal by id. + const goal2 = goalsForRecord.goalRows.find((g) => g.id === goals[1].id); + expect(goal2).toBeTruthy(); + + // Assert the second goal has the correct number of objectives. + expect(goal2.objectives.length).toBe(1); + // Assert it contains id for objective 3. + + // Assert objective text and status. + expect(goal2.objectives[0].title).toBe(objectives[2].title); + expect(goal2.objectives[0].status).toBe(objectives[2].status); + + // Assert the second goal has the correct number of topics. + expect(goal2.goalTopics.length).toBe(2); + + // Assert topic name. 
+ expect(goal2.objectives[0].topics).toEqual( + expect.arrayContaining([topics[3].name, topics[4].name]), + ); }); }); diff --git a/tests/e2e/activity-report-text-search-filter.spec.ts b/tests/e2e/activity-report-text-search-filter.spec.ts index cde2a2b4ed..e7e3031a0c 100644 --- a/tests/e2e/activity-report-text-search-filter.spec.ts +++ b/tests/e2e/activity-report-text-search-filter.spec.ts @@ -114,6 +114,9 @@ test.describe('Activity Report Text Search Filter', () => { await page.keyboard.type('Learn how to cook.'); await blur(page); + // goal source + await page.getByLabel(/Goal source/i).selectOption('Recipient request'); + // Objective. await page.getByText('Select TTA objective *- Select -').click(); await page.keyboard.press('ArrowDown'); diff --git a/tests/e2e/activity-report.spec.ts b/tests/e2e/activity-report.spec.ts index 9b7f4d4c45..ff8c311d60 100644 --- a/tests/e2e/activity-report.spec.ts +++ b/tests/e2e/activity-report.spec.ts @@ -166,6 +166,7 @@ test.describe('Activity Report', () => { const fullName = await getFullName(page); await page.getByRole('link', { name: 'Activity Reports' }).click(); + await page.getByRole('button', { name: '+ New Activity Report' }).click(); const regionNumber = await getRegionNumber(page); @@ -222,6 +223,7 @@ test.describe('Activity Report', () => { // navigate away await page.getByRole('button', { name: 'Supporting attachments' }).click(); + // PROBLEM: the side nav is not updating to reflect the saved goal.. 
// navigate back await page.getByRole('button', { name: 'Goals and objectives' }).click() @@ -382,64 +384,91 @@ test.describe('Activity Report', () => { // navigate to the 'Goals & Objectives page await page.getByRole('link', { name: 'RTTAPA' }).click(); // check that previously created goals g1 and g2 are visible - await expect(page.getByText('g1', { exact: true })).toBeVisible(); - await expect(page.getByText('g2', { exact: true })).toBeVisible(); + // Assert there are two instances of 'g1' and 'g2' on the page + await expect(page.getByText('g1', { exact: true }).first()).toBeTruthy(); + await expect(page.getByText('g1', { exact: true }).nth(1)).toBeTruthy(); + + + + await expect(page.getByText('g2', { exact: true }).first()).toBeTruthy(); + await expect(page.getByText('g2', { exact: true }).nth(1)).toBeTruthy(); // look for the goals heading for the previously created goal, e.g. 'Goal G-6, G-5RTTAPA' const g1Goals = page.locator('h3:above(p:text("g1"))').first(); - const g1GoalsTxt = await g1Goals.textContent(); - // get text for the previously created goal's objectives button, - // e.g. 'Goal G-5, G-6RTTAPA' will become 'G-5G-6' - const g1GoalsForObjectives = getGoals(g1GoalsTxt || ''); + // strip 'Goals' and 'RTTAPA' from g1GoalsTxt: e.g "Goal G-5, G-6RTTAPA" will become "G-5, G-6" // look for the goals heading for the previously created goal, e.g. 'Goal G-8, G-7RTTAPA' const g2Goals = page.locator('h3:above(p:text("g2"))').first(); - const g2GoalsTxt = await g2Goals.textContent(); - // extract text used to locate the correct objective's button, - // e.g. 'Goal G-8, G-7RTTAPA' will become 'G-7G-8' - const g2GoalsForObjectives = getGoals(g2GoalsTxt || ''); - // expand objectives for g1 - await page.getByRole('button', { name: `View objectives for goal ${g1GoalsForObjectives}` }).click(); + /* We have Two goals and Two Recipients this should result in 4 goals */ + // Expand objectives for G1. 
- await expect(page.getByText('g1o1', { exact: true })).toBeVisible(); + // Scroll until the button with the name 'View objectives for goal G-6' is visible. + await page.getByRole('button', { name: 'View objectives for goal G-6' }).scrollIntoViewIfNeeded(); + + await page.getByRole('button', { name: `View objectives for goal G-6` }).click(); + + // Scroll until the button with the name 'View objectives for goal G-5' is visible. + await page.getByRole('button', { name: 'View objectives for goal G-5' }).scrollIntoViewIfNeeded(); + + await page.getByRole('button', { name: `View objectives for goal G-5` }).click(); + + await expect(page.getByText('g1o1', { exact: true }).first()).toBeTruthy(); + await expect(page.getByText('g1o1', { exact: true }).nth(1)).toBeTruthy(); // verify a link to the activity report is found in the objective section - await expect(page.getByRole('link', { name: `R0${regionNumber}-AR-${arNumber}` })).toBeVisible(); + await expect(page.getByRole('link', { name: `R0${regionNumber}-AR-${arNumber}` }).first()).toBeTruthy(); + await expect(page.getByRole('link', { name: `R0${regionNumber}-AR-${arNumber}` }).nth(1)).toBeTruthy(); // Access parent with '..' 
- await expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText('Grant numbers')).toBeVisible(); + await expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText('Grant numbers').nth(0)).toBeTruthy(); + await expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText('Grant numbers').nth(1)).toBeTruthy(); // verify the grants are visible in the objective section await Promise.all( - grants.map(async (grant) => expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText(grant)).toBeVisible()), + grants.map(async (grant) => expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText(grant)).toBeTruthy()), ); // verify the reason is visible in the objective section - const goalOneContent = await page.getByText('g1o1', { exact: true }).locator('..').locator('..').textContent(); - expect(goalOneContent).toContain('Change in Scope'); - expect(goalOneContent).toContain('Behavioral / Mental Health / Trauma'); + const goalOneContentA = await page.getByText('g1o1', { exact: true }).first().locator('..').locator('..').textContent(); + expect(goalOneContentA).toContain('Change in Scope'); + expect(goalOneContentA).toContain('Behavioral / Mental Health / Trauma'); + const goalOneContentB = await page.getByText('g1o1', { exact: true }).nth(1).locator('..').locator('..').textContent(); + expect(goalOneContentB).toContain('Change in Scope'); + expect(goalOneContentB).toContain('Behavioral / Mental Health / Trauma'); + // verify the end date is visible in the objective section - await expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText('12/01/2050')).toBeVisible(); + await expect(page.getByText('g1o1', { exact: true }).first().locator('..').locator('..').getByText('12/01/2050')).toBeTruthy(); + await expect(page.getByText('g1o1', { exact: true }).nth(1).locator('..').locator('..').getByText('12/01/2050')).toBeTruthy(); 
// verify the correct status for the objective is visible - await expect(page.getByText('g1o1', { exact: true }).locator('..').locator('..').getByText('Not started')).toBeVisible(); + await expect(page.getByText('g1o1', { exact: true }).first().locator('..').locator('..').getByText('Not started')).toBeTruthy(); + await expect(page.getByText('g1o1', { exact: true }).nth(1).locator('..').locator('..').getByText('Not started')).toBeTruthy(); - // expand objectives for g2 - await page.getByRole('button', { name: `View objectives for goal ${g2GoalsForObjectives}` }).click(); + // Expand goals for G2. + await page.getByRole('button', { name: `View objectives for goal G-7` }).click(); + await page.getByRole('button', { name: `View objectives for goal G-8` }).click(); - await expect(page.getByText('g2o1', { exact: true })).toBeVisible(); + await expect(page.getByText('g2o1', { exact: true }).first()).toBeTruthy(); + await expect(page.getByText('g2o1', { exact: true }).nth(1)).toBeTruthy(); // verify a link to the activity report is found in the objective section - await expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByRole('link', { name: `R0${regionNumber}-AR-${arNumber}` })).toBeVisible(); - await expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText('Grant numbers')).toBeVisible(); + await expect(page.getByText('g2o1', { exact: true }).first().locator('..').locator('..').getByRole('link', { name: `R0${regionNumber}-AR-${arNumber}` })).toBeTruthy(); + await expect(page.getByText('g2o1', { exact: true }).nth(1).locator('..').locator('..').getByRole('link', { name: `R0${regionNumber}-AR-${arNumber}` })).toBeTruthy(); + await expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText('Grant numbers').first()).toBeTruthy(); + await expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText('Grant numbers').nth(1)).toBeTruthy(); // verify the grants are visible in 
the objective section await Promise.all( - grants.map(async (grant) => expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText(grant)).toBeVisible()), + grants.map(async (grant) => expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText(grant)).toBeTruthy()), ); - const goalTwoContent = await page.getByText('g2o1', {exact: true}).locator('..').locator('..').textContent(); - expect(goalTwoContent).toContain('Change in Scope'); + const goalTwoContentA = await page.getByText('g2o1', {exact: true}).first().locator('..').locator('..').textContent(); + expect(goalTwoContentA).toContain('Change in Scope'); + const goalTwoContentB = await page.getByText('g2o1', {exact: true}).nth(1).locator('..').locator('..').textContent(); + expect(goalTwoContentB).toContain('Change in Scope'); // verify the end date is visible in the objective section - await expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText('12/01/2050')).toBeVisible(); + await expect(page.getByText('g2o1', { exact: true }).first().locator('..').locator('..').getByText('12/01/2050')).toBeTruthy(); // verify the correct status for the objective is visible - await expect(page.getByText('g2o1', { exact: true }).locator('..').locator('..').getByText('Not started')).toBeVisible(); + await expect(page.getByText('g2o1', { exact: true }).nth(1).locator('..').locator('..').getByText('Not started')).toBeTruthy(); // check g1 - await page.getByText('g1', { exact: true }).locator('..').locator('..').locator('..') + await page.getByText('g1', { exact: true }).first().locator('..').locator('..').locator('..') + .getByRole('button', { name: 'Actions for goal' }) + .click(); + await page.getByText('g1', { exact: true }).nth(1).locator('..').locator('..').locator('..') .getByRole('button', { name: 'Actions for goal' }) .click(); // click on the 'Edit' button for 'g1' and verify the correct data is displayed @@ -454,7 +483,10 @@ 
test.describe('Activity Report', () => { await page.getByRole('link', { name: 'Back to RTTAPA' }).click(); // Check g2 - await page.getByText('g2', { exact: true }).locator('..').locator('..').locator('..') + await page.getByText('g2', { exact: true }).first().locator('..').locator('..').locator('..') + .getByRole('button', { name: 'Actions for goal' }) + .click(); + await page.getByText('g2', { exact: true }).nth(1).locator('..').locator('..').locator('..') .getByRole('button', { name: 'Actions for goal' }) .click(); // click on the 'Edit' button for 'g1' and verify the correct data is displayed @@ -502,6 +534,9 @@ test.describe('Activity Report', () => { // goal end date await page.getByLabel(/anticipated close date/i).fill('01/01/2023'); + // goal source + await page.getByLabel(/Goal source/i).selectOption('Recipient request'); + // add new objective await page.getByRole('button', { name: 'Add new objective' }).click(); @@ -513,8 +548,9 @@ test.describe('Activity Report', () => { await page.getByRole('button', { name: 'Submit goal' }).click(); // confirm goal is in RTR - await expect(page.getByText('This is a goal for multiple grants')).toBeVisible(); - await expect(page.getByRole('heading', { name: /Goal G-(\d), G-(\d)/i }).last()).toBeVisible(); + await expect(page.getByText('This is a goal for multiple grants').first()).toBeVisible(); + await expect(page.getByText('This is a goal for multiple grants').nth(1)).toBeVisible(); + await expect(page.getByRole('heading', { name: /Goal G-(\d)/i }).last()).toBeVisible(); // navigate to the AR page await page.getByRole('link', { name: 'Activity Reports' }).click(); diff --git a/tests/e2e/playwright.config.js b/tests/e2e/playwright.config.js index 5bc0295509..c613e203d7 100644 --- a/tests/e2e/playwright.config.js +++ b/tests/e2e/playwright.config.js @@ -33,6 +33,7 @@ const config = { headless: true, ignoreHTTPSErrors: true, acceptDownloads: true, + viewport: { width: 1920, height: 1080 }, }, }, ], diff --git 
a/tests/e2e/recipient-record.spec.ts b/tests/e2e/recipient-record.spec.ts index 71a83baa65..c2d9710bc1 100644 --- a/tests/e2e/recipient-record.spec.ts +++ b/tests/e2e/recipient-record.spec.ts @@ -34,6 +34,9 @@ test.describe('Recipient record', () => { await page.keyboard.press('ArrowDown') await page.keyboard.press('Enter'); + // goal source + await page.getByLabel(/Goal source/i).selectOption('Recipient request'); + // add an objective await page.getByRole('button', { name: 'Add new objective' }).click(); await page.getByLabel('TTA objective *').fill('A new objective'); @@ -45,7 +48,7 @@ test.describe('Recipient record', () => { await expect(page.getByText('This is the first goal for this recipient')).toBeVisible(); }); - test('closes a goal', async ({ page }) => { + test('closes a goal', async ({ page }) => { await page.goto('http://localhost:3000/'); // navigate through the recipient record tabs @@ -65,19 +68,22 @@ test.describe('Recipient record', () => { await blur(page); await page.getByLabel('Recipient\'s goal *').fill('This is the second goal for this recipient'); - await page.getByRole('button', { name: /Save and continue/i }).click(); + + await page.getByRole('button', { name: /Save and continue/i }).click(); + + // goal source + await page.getByLabel(/Goal source/i).selectOption('Recipient request'); // edit that goal to add an objective await page.getByRole('button', { name: 'Add new objective' }).click(); await page.getByLabel('TTA objective *').fill('A new objective for this second goal'); await page.getByRole('button', { name: /Save and continue/i }).click(); await page.getByRole('button', { name: 'Submit goal' }).click(); - // verify the goal appears in the table await expect(page.getByText('This is the second goal for this recipient')).toBeVisible(); // get container for the goal - const goal = page.getByTestId('goalCard').filter({ + const goal = page.getByTestId('goalCard').filter({ hasText: 'This is the second goal for this recipient' } ); @@ 
-86,11 +92,9 @@ test.describe('Recipient record', () => { // expect error await expect(page.getByText(/The goal status cannot be changed until all In progress objectives are complete or suspended./i)).toBeVisible(); - - await goal.getByRole('button', { name: /view objectives for goal/i }).click(); - + await goal.getByTestId('expander-button').click(); const objective = goal.getByTestId('objectiveList').first(); - await objective.getByRole('button').first().click(); + await objective.getByRole('button', { name: 'Change status for objective' }).click(); await objective.getByRole('button', { name: /complete/i }).click(); await page.waitForTimeout(3000); await goal.getByRole('button').first().click(); diff --git a/tools/check-coverage.js b/tools/check-coverage.js new file mode 100644 index 0000000000..c5c01c9fa0 --- /dev/null +++ b/tools/check-coverage.js @@ -0,0 +1,587 @@ +// src/tools/check-coverage.js + +const fs = require('fs'); +const path = require('path'); +const simpleGit = require('simple-git'); +const { createCoverageMap } = require('istanbul-lib-coverage'); +const yargs = require('yargs/yargs'); +const { hideBin } = require('yargs/helpers'); + +// Configuration +const argv = yargs(hideBin(process.argv)) + .option('coverage-file', { + alias: 'c', + type: 'string', + description: 'Specify location of coverage file', + default: '../coverage/coverage-final.json', + }) + .option('artifact-dir', { + alias: 'a', + type: 'string', + description: 'Specify location of artifact dir', + default: '../coverage-artifacts', + }) + .option('directory-filter', { + alias: 'd', + type: 'string', + description: 'filter subdirs', + default: '', + }) + .option('fail-on-uncovered', { + alias: 'f', + type: 'boolean', + description: 'Fail the script if uncovered lines are detected', + default: true, + }) + .option('output-format', { + alias: 'o', + type: 'string', + description: 'Specify output formats (comma-separated, e.g., json,html)', + default: 'json', + }) + .help() + 
.alias('help', 'h').argv; + +const COVERAGE_FILE = path.resolve(__dirname, argv['coverage-file']); +const BASE_BRANCH = 'main'; +// Directory to store artifacts +const ARTIFACT_DIR = path.resolve(__dirname, argv['artifact-dir']); + +/** + * Fetch the base branch to ensure it's up-to-date. + */ +async function fetchBaseBranch() { + const git = simpleGit(); + await git.fetch('origin', BASE_BRANCH); +} + +/** + * Get the list of modified or added lines in the PR, optionally filtered by directory. + * @param {string} [directoryFilter] - The directory to filter files by (optional). + */ +async function getModifiedLines(directoryFilter = ['src/', 'tools/', 'packages/common/']) { + // eslint-disable-next-line no-console + console.log('getModifiedLines:', directoryFilter); + + const git = simpleGit(); + const diffFiles = await git.diff(['--name-only', `${BASE_BRANCH}...HEAD`]); + // eslint-disable-next-line no-console + console.log('getModifiedLines:\n', diffFiles); + + // Filter files based on the file extension and optional directory + let files = (diffFiles || '') + .split('\n') + .filter((file) => /\.(js|ts)$/.test(file)) + .filter((file) => !file.includes('CLI.js')) + .filter((file) => !file.includes('/__tests__/')) + .filter((file) => !file.includes('.test.')); + + // If a directory is provided, filter files that start with the directory + if (directoryFilter.length > 0) { + files = files.filter((file) => directoryFilter.some((directory) => file.startsWith(directory))); + } + + // eslint-disable-next-line no-console + console.log('files:', files); + + const modifiedLines = {}; + + for (const file of files) { + // Log the file being processed + // eslint-disable-next-line no-console + console.log('getModifiedLines:', file); + const diff = await git.diff(['-U0', `${BASE_BRANCH}...HEAD`, '--', file]); + const regex = /@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? 
@@/g; + let match; + + while ((match = regex.exec(diff)) !== null) { + const startLine = parseInt(match[1], 10); + const lineCount = match[2] ? parseInt(match[2], 10) : 1; + + if (!modifiedLines[file]) { + modifiedLines[file] = new Set(); + } + for (let i = startLine; i < startLine + lineCount; i++) { + // eslint-disable-next-line no-console + console.log(i); + modifiedLines[file].add(i); + } + } + } + + // Convert sets to arrays + Object.keys(modifiedLines).forEach((file) => { + modifiedLines[file] = Array.from(modifiedLines[file]); + }); + + return modifiedLines; +} + +/** + * Load and parse the merged coverage report. + */ +function loadCoverage(coverageFile = COVERAGE_FILE) { + try { + if (!fs.existsSync(coverageFile)) { + const errorMessage = `Coverage file not found at ${coverageFile}`; + // eslint-disable-next-line no-console + console.error(errorMessage); + throw new Error(errorMessage); + } + + const coverageData = JSON.parse(fs.readFileSync(coverageFile, 'utf8')); + const coverageMap = createCoverageMap(coverageData); + return coverageMap; + } catch (error) { + throw new Error(`Failed to parse coverage data at ${coverageFile}`); + } +} + +// Helper function to get an array of lines from a location +function getLinesFromLocation(loc) { + const lines = []; + for (let i = loc.start.line; i <= loc.end.line; i++) { + lines.push(i); + } + return lines; +} + +// Helper function to get overlapping lines between two arrays +function linesIntersect(lines1, lines2) { + return lines1.filter((line) => lines2.includes(line)); +} + +// Helper function to adjust location to only include overlapping lines +function intersectLocationWithLines(loc, overlappingLines) { + if (overlappingLines.length === 0) { + return null; // No overlap + } + const newStartLine = Math.max(loc.start.line, Math.min(...overlappingLines)); + const newEndLine = Math.min(loc.end.line, Math.max(...overlappingLines)); + + const newStart = { ...loc.start }; + const newEnd = { ...loc.end }; + + // 
Adjust start line and column + if (newStartLine !== loc.start.line) { + newStart.line = newStartLine; + newStart.column = 0; // Reset column since line changed + } + + // Adjust end line and column + if (newEndLine !== loc.end.line) { + newEnd.line = newEndLine; + newEnd.column = undefined; // Column is unknown + } + + return { start: newStart, end: newEnd }; +} + +/** + * Check if modified lines are covered. + */ +function checkCoverage(modifiedLines, coverageMap) { + const uncovered = {}; + + Object.entries(modifiedLines).forEach(([file, lines]) => { + const normalizedFile = path.resolve(process.cwd(), file); + const relativeFile = path.relative(process.cwd(), normalizedFile); + + let fileCoverage; + try { + fileCoverage = coverageMap.fileCoverageFor(normalizedFile); + if (!fileCoverage) { + throw new Error(`File not found in coverage map: ${normalizedFile}`); + } + } catch (e) { + const ranges = groupIntoRanges(lines); + console.log('checkCoverage:',ranges); + uncovered[relativeFile] = uncovered[relativeFile] || { statements: [], functions: [], branches: [] }; + ranges.forEach(({ start, end }) => { + uncovered[relativeFile].statements.push({ + start: { line: start, column: 0 }, + end: { line: end, column: 0 }, + }); + }); + return; + } + + uncovered[relativeFile] = { statements: [], functions: [], branches: [] }; + + // Check uncovered statements + Object.entries(fileCoverage.statementMap).forEach(([id, loc]) => { + const statementLines = getLinesFromLocation(loc); + const overlappingLines = linesIntersect(lines, statementLines); + console.log('checkCoverage:',overlappingLines); + if (overlappingLines.length > 0 && fileCoverage.s[id] === 0) { + const intersectedLoc = intersectLocationWithLines(loc, overlappingLines); + if (intersectedLoc) { + uncovered[relativeFile].statements.push({ + id, + start: intersectedLoc.start, + end: intersectedLoc.end, + }); + } + } + }); + + // Check uncovered functions + Object.entries(fileCoverage.fnMap).forEach(([id, fn]) => { + 
const functionLines = getLinesFromLocation(fn.loc); + const overlappingLines = linesIntersect(lines, functionLines); + if (overlappingLines.length > 0 && fileCoverage.f[id] === 0) { + const intersectedLoc = intersectLocationWithLines(fn.loc, overlappingLines); + if (intersectedLoc) { + uncovered[relativeFile].functions.push({ + id, + name: fn.name, + start: intersectedLoc.start, + end: intersectedLoc.end, + }); + } + } + }); + + // Check uncovered branches + Object.entries(fileCoverage.branchMap).forEach(([id, branch]) => { + branch.locations.forEach((loc, idx) => { + const branchLines = getLinesFromLocation(loc); + const overlappingLines = linesIntersect(lines, branchLines); + if (overlappingLines.length > 0 && fileCoverage.b[id][idx] === 0) { + const intersectedLoc = intersectLocationWithLines(loc, overlappingLines); + if (intersectedLoc) { + uncovered[relativeFile].branches.push({ + id, + locationIndex: idx, + start: intersectedLoc.start, + end: intersectedLoc.end, + }); + } + } + }); + }); + + // Remove empty file entry if no uncovered items were found + if ( + uncovered[relativeFile].statements.length === 0 && + uncovered[relativeFile].functions.length === 0 && + uncovered[relativeFile].branches.length === 0 + ) { + delete uncovered[relativeFile]; + } + }); + + return uncovered; +} + +function groupIntoRanges(lines) { + const ranges = []; + if (lines.length === 0) { + return ranges; + } + + lines.sort((a, b) => a - b); + let start = lines[0]; + let end = lines[0]; + + for (let i = 1; i < lines.length; i++) { + if (lines[i] === end + 1) { + // Contiguous line + end = lines[i]; + } else { + // Not contiguous, save the previous range + ranges.push({ start, end }); + start = lines[i]; + end = lines[i]; + } + } + // Push the last range + ranges.push({ start, end }); + + return ranges; +} + +/** + * Generate an artifact report for uncovered lines. 
+ */ +function generateArtifact(uncovered, artifactDir = ARTIFACT_DIR) { + if (!fs.existsSync(artifactDir)) { + fs.mkdirSync(artifactDir, { recursive: true }); + } + + const artifactPath = path.join(artifactDir, 'uncovered-lines.json'); + + fs.writeFileSync(artifactPath, JSON.stringify(uncovered, null, 2), 'utf-8'); + // eslint-disable-next-line no-console + console.log(`JSON artifact generated at ${artifactPath}`); +} + +/** + * Generate an HTML report for uncovered lines. + */ +function generateHtmlReport(uncovered, artifactDir = ARTIFACT_DIR) { + if (!fs.existsSync(artifactDir)) { + fs.mkdirSync(artifactDir, { recursive: true }); + } + + const artifactPath = path.join(artifactDir, 'uncovered-lines.html'); + + if (Object.keys(uncovered).length === 0) { + const htmlContent = ` + + Coverage Report + +

Coverage Report

+

All modified lines are covered by tests.

+ + + `; + fs.writeFileSync(artifactPath, htmlContent, 'utf-8'); + console.log(`HTML report generated at ${artifactPath}`); + return; + } + + let htmlContent = ` + + + Uncovered Lines Report + + + +

Uncovered Lines Report

+ `; + + Object.entries(uncovered).forEach(([file, data]) => { + htmlContent += `

${file}

`; + + if (data.statements.length > 0) { + htmlContent += `

Statements

`; + htmlContent += ` + + + + + + + + + + `; + data.statements.forEach((stmt) => { + htmlContent += ` + + + + + + `; + }); + htmlContent += ` + +
IDStart LineEnd Line
${stmt.id || ''}${stmt.start.line}${stmt.end.line}
+ `; + } + + if (data.functions.length > 0) { + htmlContent += `

Functions

`; + htmlContent += ` + + + + + + + + + + + `; + data.functions.forEach((fn) => { + htmlContent += ` + + + + + + + `; + }); + htmlContent += ` + +
IDNameStart LineEnd Line
${fn.id}${fn.name}${fn.start.line}${fn.end.line}
+ `; + } + + if (data.branches.length > 0) { + htmlContent += `

Branches

`; + htmlContent += ` + + + + + + + + + + + `; + data.branches.forEach((branch) => { + htmlContent += ` + + + + + + + `; + }); + htmlContent += ` + +
IDBranch IndexStart LineEnd Line
${branch.id}${branch.locationIndex}${branch.start.line}${branch.end.line}
+ `; + } + }); + + htmlContent += ` + + + `; + + fs.writeFileSync(artifactPath, htmlContent, 'utf-8'); + console.log(`HTML report generated at ${artifactPath}`); +} + +/** + * Main function to execute the coverage check. + * Extracted for testing purposes. + */ +async function main({ + coverageFile = COVERAGE_FILE, + artifactDir = ARTIFACT_DIR, + directoryFilter = (argv['directory-filter'] || '').split(','), + failOnUncovered = argv['fail-on-uncovered'], + outputFormat = argv['output-format'], +} = {}) { + try { + // eslint-disable-next-line no-console + console.log('Fetching base branch...'); + await fetchBaseBranch(); + + // eslint-disable-next-line no-console + console.log('Identifying modified lines...'); + const modifiedLines = await getModifiedLines(directoryFilter); + + // eslint-disable-next-line no-console + console.log('Loading coverage data...'); + const coverageMap = loadCoverage(coverageFile); + + // eslint-disable-next-line no-console + console.log('Checking coverage...'); + const uncovered = checkCoverage(modifiedLines, coverageMap); + + if (Object.keys(uncovered).length > 0) { + // eslint-disable-next-line no-console + console.log('Uncovered lines detected:', uncovered); + Object.entries(uncovered).forEach(([file, data]) => { + console.error(`- ${file}:`); + if (data.statements.length > 0) { + const lines = data.statements.map((stmt) => stmt.start.line).sort((a, b) => a - b); + const ranges = groupIntoRanges(lines); + const rangeStrings = ranges + .map((range) => + range.start === range.end ? `${range.start}` : `${range.start}-${range.end}`, + ) + .join(', '); + console.log(` Statements: ${rangeStrings}`); + } + if (data.functions.length > 0) { + const lines = data.functions.map((fn) => fn.start.line).sort((a, b) => a - b); + const ranges = groupIntoRanges(lines); + const rangeStrings = ranges + .map((range) => + range.start === range.end ? 
`${range.start}` : `${range.start}-${range.end}`, + ) + .join(', '); + console.log(` Functions: ${rangeStrings}`); + } + if (data.branches.length > 0) { + const lines = data.branches.map((branch) => branch.start.line).sort((a, b) => a - b); + const ranges = groupIntoRanges(lines); + const rangeStrings = ranges + .map((range) => + range.start === range.end ? `${range.start}` : `${range.start}-${range.end}`, + ) + .join(', '); + console.log(` Branches: ${rangeStrings}`); + } + }); + + // Generate JSON artifact + if (outputFormat.includes('json')) { + generateArtifact(uncovered, artifactDir); + } + + // Generate HTML report if specified + if (outputFormat.includes('html')) { + generateHtmlReport(uncovered, artifactDir); + } + + if (failOnUncovered) { + // eslint-disable-next-line no-console + console.log('Coverage check failed due to uncovered lines.'); + process.exit(1); + } + } else { + // eslint-disable-next-line no-console + console.log('All modified lines are covered by tests.'); + + if (outputFormat.includes('html')) { + generateHtmlReport(uncovered, artifactDir); + } + } + } catch (error) { + // eslint-disable-next-line no-console + console.error('Error during coverage check:', error); + process.exit(1); + } +} + +// Run the script only if it's the main module +if (require.main === module) { + main(); +} + +module.exports = { + fetchBaseBranch, + getModifiedLines, + loadCoverage, + getLinesFromLocation, + linesIntersect, + intersectLocationWithLines, + checkCoverage, + groupIntoRanges, + generateArtifact, + generateHtmlReport, + main, + argv, + COVERAGE_FILE, + ARTIFACT_DIR, +}; diff --git a/tools/check-coverage.test.js b/tools/check-coverage.test.js new file mode 100644 index 0000000000..82aefdf90f --- /dev/null +++ b/tools/check-coverage.test.js @@ -0,0 +1,987 @@ +// tests/check-coverage.test.js + +const fs = require('fs'); +const path = require('path'); +const os = require('os'); +const simpleGit = require('simple-git'); +const { createCoverageMap } = 
require('istanbul-lib-coverage'); +const { + fetchBaseBranch, + getModifiedLines, + loadCoverage, + checkCoverage, + groupIntoRanges, + generateArtifact, + generateHtmlReport, + intersectLocationWithLines, + main, +} = require('./check-coverage'); + +jest.mock('simple-git'); + +describe('check-coverage script', () => { + let tmpDir; + let originalCwd; + + beforeEach(() => { + tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'check-coverage-test-')); + originalCwd = process.cwd(); + process.chdir(tmpDir); + }); + + afterEach(() => { + process.chdir(originalCwd); + fs.rmSync(tmpDir, { recursive: true, force: true }); + jest.restoreAllMocks(); + }); + + describe('fetchBaseBranch', () => { + it('should fetch the base branch without errors', async () => { + const gitFetchMock = jest.fn().mockResolvedValue(); + simpleGit.mockReturnValue({ fetch: gitFetchMock }); + + await expect(fetchBaseBranch()).resolves.not.toThrow(); + expect(gitFetchMock).toHaveBeenCalled(); + }); + + it('should throw an error if git fetch fails', async () => { + const gitFetchMock = jest.fn().mockRejectedValue(new Error('Fetch failed')); + simpleGit.mockReturnValue({ fetch: gitFetchMock }); + + await expect(fetchBaseBranch()).rejects.toThrow('Fetch failed'); + }); + }); + + describe('getModifiedLines', () => { + it('should return modified lines for JavaScript files', async () => { + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('src/file1.js\nsrc/file2.ts\n') // Mock for diffFiles + .mockResolvedValueOnce('@@ -1,0 +1,2 @@\n+line1\n+line2\n') // Mock diff for file1.js + .mockResolvedValueOnce('@@ -1,0 +1 @@\n+line1\n'); // Mock diff for file2.ts + + simpleGit.mockReturnValue({ diff: gitDiffMock }); + + const modifiedLines = await getModifiedLines(); + + expect(modifiedLines).toEqual({ + 'src/file1.js': [1, 2], + 'src/file2.ts': [1], + }); + }); + + it('should filter files by directory if provided', async () => { + const gitDiffMock = jest.fn() + 
.mockResolvedValueOnce('src/file1.js\ntests/file2.ts\n') // Mock for diffFiles + .mockResolvedValueOnce('@@ -1,0 +1,2 @@\n+line1\n+line2\n') // Mock diff for src/file1.js + .mockResolvedValueOnce(''); // No diff for tests/file2.ts + + simpleGit.mockReturnValue({ diff: gitDiffMock }); + + const modifiedLines = await getModifiedLines(['src/']); + + expect(modifiedLines).toEqual({ + 'src/file1.js': [1, 2], + }); + }); + + it('should not filter files by directory if provided', async () => { + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('src/file1.js\nunfiltered/file2.ts\n') // Mock for diffFiles + .mockResolvedValueOnce('@@ -1,0 +1,2 @@\n+line1\n+line2\n') // Mock diff for src/file1.js + .mockResolvedValueOnce('@@ -1,0 +1,2 @@\n+line1\n+line2\n'); // No diff for tests/file2.ts + + simpleGit.mockReturnValue({ diff: gitDiffMock }); + + const modifiedLines = await getModifiedLines([]); + + expect(modifiedLines).toEqual({ + 'src/file1.js': [1, 2], + 'unfiltered/file2.ts': [1,2], + }); + }); + + it('should return an empty object if there are no modified files', async () => { + const gitDiffMock = jest.fn().mockResolvedValue(''); + simpleGit.mockReturnValue({ diff: gitDiffMock }); + + const modifiedLines = await getModifiedLines(); + expect(modifiedLines).toEqual({}); + }); + + it('should ignore non-JavaScript/TypeScript files', async () => { + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('file1.py\nfile2.txt\n') // Files that should be ignored + .mockResolvedValue(''); // No line diffs + + simpleGit.mockReturnValue({ diff: gitDiffMock }); + + const modifiedLines = await getModifiedLines(); + expect(modifiedLines).toEqual({}); + }); + }); + + describe('loadCoverage', () => { + it('should load and return the coverage map', () => { + const coverageData = { + 'file1.js': { + path: 'file1.js', + statementMap: {}, + fnMap: {}, + branchMap: {}, + s: {}, + f: {}, + b: {}, + }, + }; + + const coverageFile = path.join(tmpDir, 'coverage-final.json'); + 
fs.writeFileSync(coverageFile, JSON.stringify(coverageData)); + + const coverageMap = loadCoverage(coverageFile); + + expect(coverageMap.files()).toContain('file1.js'); + }); + + it('should throw an error if coverage file does not exist', () => { + const consoleErrorMock = jest.spyOn(console, 'error').mockImplementation(() => {}); + + expect(() => loadCoverage('non-existent-file.json')).toThrow( + 'Failed to parse coverage data at non-existent-file.json' + ); + + expect(consoleErrorMock).toHaveBeenCalledWith( + expect.stringContaining('Coverage file not found at') + ); + }); + + it('should throw an error if the coverage file is corrupted', () => { + const coverageFile = path.join(tmpDir, 'corrupted-coverage.json'); + fs.writeFileSync(coverageFile, 'Not JSON content'); // Non-JSON content + + expect(() => loadCoverage(coverageFile)).toThrow( + 'Failed to parse coverage data at' + ); + }); + + it('should handle malformed JSON data gracefully', () => { + const coverageFile = path.join(tmpDir, 'bad-data.json'); + fs.writeFileSync(coverageFile, '{"bad json}'); + expect(() => loadCoverage(coverageFile)).toThrow('Failed to parse coverage data'); + }); + + }); + + describe('checkCoverage', () => { + it('should identify uncovered statements', () => { + const modifiedLines = { + 'file1.js': [1, 2, 3], + }; + + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: { + '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + '1': { start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }, + '2': { start: { line: 3, column: 0 }, end: { line: 3, column: 0 } }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 0, '1': 1, '2': 0 }, + f: {}, + b: {}, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({ + 'file1.js': { + statements: [ + { id: '0', 
start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + { id: '2', start: { line: 3, column: 0 }, end: { line: 3, column: 0 } }, + ], + functions: [], + branches: [], + }, + }); + }); + + it('should consider all lines uncovered if file is not in coverage map', () => { + const modifiedLines = { + 'file1.js': [1, 2, 3], + }; + + const coverageMap = createCoverageMap({}); + + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({ + 'file1.js': { + statements: [ + { start: { line: 1, column: 0 }, end: { line: 3, column: 0 } }, + ], + functions: [], + branches: [], + }, + }); + }); + + it('should mark all lines as uncovered if none are covered in modified files', () => { + const modifiedLines = { + 'file1.js': [1, 2, 3], + }; + + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: { + '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + '1': { start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }, + '2': { start: { line: 3, column: 0 }, end: { line: 3, column: 0 } }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 0, '1': 0, '2': 0 }, + f: {}, + b: {}, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({ + 'file1.js': { + statements: [ + { id: '0', start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + { id: '1', start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }, + { id: '2', start: { line: 3, column: 0 }, end: { line: 3, column: 0 } }, + ], + functions: [], + branches: [], + }, + }); + }); + + it('should return uncovered lines when some lines are not covered', () => { + const modifiedLines = { 'someFile.js': [1, 2, 3] }; + const normalizedFilePath = path.resolve(process.cwd(), 'someFile.js'); + const coverageData = { + [normalizedFilePath]: { + path: 
normalizedFilePath, + statementMap: { + '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + '1': { start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }, + '2': { start: { line: 3, column: 0 }, end: { line: 3, column: 0 } }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 0, '1': 3, '2': 3 }, + f: {}, + b: {}, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(Object.keys(uncovered)).toContain('someFile.js'); + }); + + it('should return an empty object when all lines are covered', () => { + const modifiedLines = { 'fullyCoveredFile.js': [1, 2, 3] }; + const normalizedFilePath = path.resolve(process.cwd(), 'fullyCoveredFile.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: { + '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + '1': { start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }, + '2': { start: { line: 3, column: 0 }, end: { line: 3, column: 0 } }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 3, '1': 3, '2': 1 }, + f: {}, + b: {}, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({}); + }); + + it('should identify uncovered functions', () => { + const modifiedLines = { + 'fileWithUncoveredFunctions.js': [1, 2, 3], + }; + + const normalizedFilePath = path.resolve(process.cwd(), 'fileWithUncoveredFunctions.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: {}, + fnMap: { + '0': { name: 'functionOne', loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } } }, + '1': { name: 'functionTwo', loc: { start: { line: 2, column: 0 }, end: { line: 2, column: 0 } } }, + }, + branchMap: {}, + s: {}, + f: { '0': 0, '1': 1 }, + b: {}, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + const 
uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({ + 'fileWithUncoveredFunctions.js': { + statements: [], + functions: [ + { id: '0', name: 'functionOne', start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + ], + branches: [], + }, + }); + }); + + it('should identify uncovered branches', () => { + const modifiedLines = { + 'fileWithUncoveredBranches.js': [1, 2, 3], + }; + + const normalizedFilePath = path.resolve(process.cwd(), 'fileWithUncoveredBranches.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: {}, + fnMap: {}, + branchMap: { + '0': { locations: [{ start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }] }, + '1': { locations: [{ start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }] }, + }, + s: {}, + f: {}, + b: { '0': [0], '1': [1] }, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({ + 'fileWithUncoveredBranches.js': { + statements: [], + functions: [], + branches: [ + { id: '0', locationIndex: 0, start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + ], + }, + }); + }); + + it('should return uncovered functions and branches when both are not covered', () => { + const modifiedLines = { 'complexFile.js': [1, 2, 3] }; + const normalizedFilePath = path.resolve(process.cwd(), 'complexFile.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: {}, + fnMap: { + '0': { name: 'funcOne', loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } } }, + }, + branchMap: { + '1': { locations: [{ start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }] }, + }, + s: {}, + f: { '0': 0 }, + b: { '1': [0] }, + }, + }; + + const coverageMap = createCoverageMap(coverageData); + const uncovered = checkCoverage(modifiedLines, coverageMap); + + expect(uncovered).toEqual({ + 'complexFile.js': 
{ + statements: [], + functions: [ + { id: '0', name: 'funcOne', start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + ], + branches: [ + { id: '1', locationIndex: 0, start: { line: 2, column: 0 }, end: { line: 2, column: 0 } }, + ], + }, + }); + }); + + it('should throw an error when file is not found in the coverage map', () => { + const modifiedLines = { 'missingFile.js': [1, 2] }; + const coverageMap = { + fileCoverageFor: () => null, + }; + + const uncovered = checkCoverage(modifiedLines, coverageMap); + expect(uncovered).toEqual({ + 'missingFile.js': { + statements: [ + { start: { line: 1, column: 0 }, end: { line: 2, column: 0 } }, + ], + functions: [], + branches: [], + }, + }); + }); + + it('should not create a new entry in uncovered if already exists', () => { + const modifiedLines = { 'file1.js': [1] }; + const coverageMap = createCoverageMap({ + 'file1.js': { + path: 'file1.js', + statementMap: {}, + fnMap: {}, + branchMap: {}, + s: {}, + f: {}, + b: {} + } + }); + const uncovered = { 'file1.js': { statements: [], functions: [], branches: [] } }; + + checkCoverage(modifiedLines, coverageMap); + expect(Object.keys(uncovered)).toContain('file1.js'); + }); + }); + + describe('groupIntoRanges', () => { + it('should group consecutive lines into ranges', () => { + const lines = [1, 2, 3, 5, 6, 8]; + const ranges = groupIntoRanges(lines); + + expect(ranges).toEqual([ + { start: 1, end: 3 }, + { start: 5, end: 6 }, + { start: 8, end: 8 }, + ]); + }); + + it('should return empty array for empty input', () => { + const ranges = groupIntoRanges([]); + expect(ranges).toEqual([]); + }); + }); + + describe('generateArtifact', () => { + it('should generate JSON artifact with uncovered lines', () => { + const uncovered = { + 'file1.js': { + statements: [{ id: '0', start: { line: 1 }, end: { line: 1 } }], + functions: [], + branches: [], + }, + }; + + const artifactDir = path.join(tmpDir, 'artifacts'); + generateArtifact(uncovered, artifactDir); + + const 
artifactPath = path.join(artifactDir, 'uncovered-lines.json'); + expect(fs.existsSync(artifactPath)).toBe(true); + + const content = JSON.parse(fs.readFileSync(artifactPath, 'utf-8')); + expect(content).toEqual(uncovered); + }); + + it('should not create artifact directory if it exists', () => { + const uncovered = {}; + const artifactDir = path.join(tmpDir, 'existingDir'); + fs.mkdirSync(artifactDir); + + generateArtifact(uncovered, artifactDir); + expect(fs.existsSync(artifactDir)).toBe(true); + }); + + it('should include statements in the artifact if uncovered statements exist', () => { + const uncovered = { + 'file1.js': { + statements: [{ id: '0', start: { line: 1 }, end: { line: 2 } }], + functions: [], + branches: [], + }, + }; + const artifactDir = path.join(tmpDir, 'artifacts'); + generateArtifact(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.json'); + const content = JSON.parse(fs.readFileSync(artifactPath, 'utf-8')); + expect(content['file1.js'].statements).toHaveLength(1); + }); + + it('should include functions in the artifact if uncovered functions exist', () => { + const uncovered = { + 'file1.js': { + statements: [], + functions: [{ id: '0', name: 'myFunc', start: { line: 1 }, end: { line: 2 } }], + branches: [], + }, + }; + const artifactDir = path.join(tmpDir, 'artifacts'); + generateArtifact(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.json'); + const content = JSON.parse(fs.readFileSync(artifactPath, 'utf-8')); + expect(content['file1.js'].functions).toHaveLength(1); + }); + }); + + describe('generateHtmlReport', () => { + it('should generate HTML report with uncovered lines', () => { + const uncovered = { + 'file1.js': { + statements: [{ id: '0', start: { line: 1 }, end: { line: 1 } }], + functions: [], + branches: [], + }, + }; + + const artifactDir = path.join(tmpDir, 'artifacts'); + generateHtmlReport(uncovered, artifactDir); + + const artifactPath = 
path.join(artifactDir, 'uncovered-lines.html'); + expect(fs.existsSync(artifactPath)).toBe(true); + + const content = fs.readFileSync(artifactPath, 'utf-8'); + expect(content).toContain('

file1.js

'); + expect(content).toContain('1'); + }); + + it('should generate a report indicating all lines are covered when no uncovered lines', () => { + const uncovered = {}; + + const artifactDir = path.join(tmpDir, 'artifacts'); + generateHtmlReport(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.html'); + expect(fs.existsSync(artifactPath)).toBe(true); + + const content = fs.readFileSync(artifactPath, 'utf-8'); + expect(content).toContain('

All modified lines are covered by tests.

'); + }); + + it('should generate HTML report if outputFormat includes html', () => { + const uncovered = { + 'file1.js': { + statements: [], + functions: [], + branches: [], + }, + }; + const artifactDir = path.join(tmpDir, 'artifacts'); + generateHtmlReport(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.html'); + expect(fs.existsSync(artifactPath)).toBe(true); + }); + it('should include functions in the HTML report when uncovered functions are present', () => { + const uncovered = { + 'file1.js': { + statements: [], + functions: [ + { id: '1', name: 'myFunction', start: { line: 5 }, end: { line: 10 } }, + ], + branches: [], + }, + }; + + const artifactDir = path.join(tmpDir, 'artifacts'); + generateHtmlReport(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.html'); + expect(fs.existsSync(artifactPath)).toBe(true); + + const content = fs.readFileSync(artifactPath, 'utf-8'); + expect(content).toContain('

file1.js

'); + expect(content).toContain('

Functions

'); + expect(content).toContain('1'); // function ID + expect(content).toContain('myFunction'); // function name + expect(content).toContain('5'); // start line + expect(content).toContain('10'); // end line + }); + + it('should include branches in the HTML report when uncovered branches are present', () => { + const uncovered = { + 'file1.js': { + statements: [], + functions: [], + branches: [ + { + id: '2', + locationIndex: 0, + start: { line: 15 }, + end: { line: 20 }, + }, + ], + }, + }; + + const artifactDir = path.join(tmpDir, 'artifacts'); + generateHtmlReport(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.html'); + expect(fs.existsSync(artifactPath)).toBe(true); + + const content = fs.readFileSync(artifactPath, 'utf-8'); + expect(content).toContain('

file1.js

'); + expect(content).toContain('

Branches

'); + expect(content).toContain('2'); // branch ID + expect(content).toContain('0'); // location index + expect(content).toContain('15'); // start line + expect(content).toContain('20'); // end line + }); + + it('should include statements, functions, and branches in the same HTML report when all are present', () => { + const uncovered = { + 'file1.js': { + statements: [{ id: '0', start: { line: 1 }, end: { line: 1 } }], + functions: [ + { id: '1', name: 'myFunction', start: { line: 5 }, end: { line: 10 } }, + ], + branches: [ + { + id: '2', + locationIndex: 0, + start: { line: 15 }, + end: { line: 20 }, + }, + ], + }, + }; + + const artifactDir = path.join(tmpDir, 'artifacts'); + generateHtmlReport(uncovered, artifactDir); + + const artifactPath = path.join(artifactDir, 'uncovered-lines.html'); + expect(fs.existsSync(artifactPath)).toBe(true); + + const content = fs.readFileSync(artifactPath, 'utf-8'); + expect(content).toContain('

file1.js

'); + expect(content).toContain('

Statements

'); + expect(content).toContain('1'); // statement start line + + expect(content).toContain('

Functions

'); + expect(content).toContain('1'); // function ID + expect(content).toContain('myFunction'); + expect(content).toContain('5'); // function start line + expect(content).toContain('10'); // function end line + + expect(content).toContain('

Branches

'); + expect(content).toContain('2'); // branch ID + expect(content).toContain('0'); // location index + expect(content).toContain('15'); // branch start line + expect(content).toContain('20'); // branch end line + }); + }); + + describe('intersectLocationWithLines', () => { + it('should return null when there are no overlapping lines', () => { + const loc = { start: { line: 5, column: 2 }, end: { line: 10, column: 4 } }; + const overlappingLines = []; + expect(intersectLocationWithLines(loc, overlappingLines)).toBeNull(); + }); + + it('should adjust the start line when there is partial overlap at the start', () => { + const loc = { start: { line: 5, column: 2 }, end: { line: 10, column: 4 } }; + const overlappingLines = [6, 7, 8]; + const result = intersectLocationWithLines(loc, overlappingLines); + + expect(result).toEqual({ + start: { line: 6, column: 0 }, + end: { line: 8, column: undefined }, + }); + }); + + it('should adjust the end line when there is partial overlap at the end', () => { + const loc = { start: { line: 5, column: 2 }, end: { line: 10, column: 4 } }; + const overlappingLines = [8, 9]; + const result = intersectLocationWithLines(loc, overlappingLines); + + expect(result).toEqual({ + start: { line: 8, column: 0 }, + end: { line: 9, column: undefined }, + }); + }); + + it('should adjust both start and end lines when there is a complete overlap', () => { + const loc = { start: { line: 5, column: 2 }, end: { line: 10, column: 4 } }; + const overlappingLines = [6, 7, 8, 9]; + const result = intersectLocationWithLines(loc, overlappingLines); + + expect(result).toEqual({ + start: { line: 6, column: 0 }, + end: { line: 9, column: undefined }, + }); + }); + + it('should return the original location when overlapping lines fully cover the location', () => { + const loc = { start: { line: 5, column: 2 }, end: { line: 10, column: 4 } }; + const overlappingLines = [5, 6, 7, 8, 9, 10]; + const result = intersectLocationWithLines(loc, overlappingLines); + + 
expect(result).toEqual({ + start: { line: 5, column: 2 }, + end: { line: 10, column: 4 }, + }); + }); + }); + + describe('main', () => { + it('should complete successfully when all lines are covered', async () => { + jest.spyOn(console, 'log').mockImplementation(() => {}); + jest.spyOn(console, 'error').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + + // Mock git functions + const gitFetchMock = jest.fn().mockResolvedValue(); + const gitRawMock = jest.fn().mockResolvedValue('1234567890abcdef\n'); + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('file1.js\n') + .mockResolvedValueOnce('@@ -0,0 +1 @@\n+line1\n'); + + simpleGit.mockReturnValue({ + fetch: gitFetchMock, + raw: gitRawMock, + diff: gitDiffMock, + }); + + // Prepare coverage data + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: { + '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 1 }, + f: {}, + b: {}, + }, + }; + + const coverageFile = path.join(tmpDir, 'coverage-final.json'); + fs.writeFileSync(coverageFile, JSON.stringify(coverageData)); + + await main({ + coverageFile, + artifactDir: path.join(tmpDir, 'artifacts'), + outputFormat: 'json', + }); + + expect(console.log.mock.calls.flat()).toContain('All modified lines are covered by tests.'); + expect(process.exit).not.toHaveBeenCalled(); + }); + + it('should fail when uncovered lines are detected', async () => { + jest.spyOn(console, 'log').mockImplementation(() => {}); + jest.spyOn(console, 'error').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + + // Mock git functions + const gitFetchMock = jest.fn().mockResolvedValue(); + const gitRawMock = jest.fn().mockResolvedValue('1234567890abcdef\n'); + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('file1.js\n') // Modified file 
+ .mockResolvedValueOnce('@@ -1,0 +1 @@\n+line1\n'); // Modified line in file1.js + + simpleGit.mockReturnValue({ + fetch: gitFetchMock, + raw: gitRawMock, + diff: gitDiffMock, + }); + + // Provide coverage data without coverage for the modified lines + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: { '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } } }, + fnMap: {}, + branchMap: {}, + s: { '0': 0 }, // Mark line as uncovered + f: {}, + b: {}, + }, + }; + + const coverageFile = path.join(tmpDir, 'coverage-final.json'); + fs.writeFileSync(coverageFile, JSON.stringify(coverageData)); + + await main({ + coverageFile, + artifactDir: path.join(tmpDir, 'artifacts'), + outputFormat: 'json', + }); + + expect(console.log.mock.calls.flat()).toContain('Uncovered lines detected:'); + expect(process.exit).toHaveBeenCalledWith(1); + }); + + it('should log an error if the coverage file is missing', async () => { + jest.spyOn(console, 'error').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + + await main({ + coverageFile: 'missing-coverage.json', + artifactDir: path.join(tmpDir, 'artifacts'), + outputFormat: 'json', + }); + + expect(console.error).toHaveBeenCalledWith(expect.stringContaining('Coverage file not found at')); + expect(process.exit).toHaveBeenCalledWith(1); + }); + + it('should succeed if no uncovered lines are detected', async () => { + jest.spyOn(console, 'log').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + + // Mock git functions to simulate a covered file + const gitFetchMock = jest.fn().mockResolvedValue(); + const gitRawMock = jest.fn().mockResolvedValue('1234567890abcdef\n'); + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('file1.js\n') + .mockResolvedValueOnce('@@ -0,0 +1 @@\n+line1\n'); + simpleGit.mockReturnValue({ + fetch: 
gitFetchMock, + raw: gitRawMock, + diff: gitDiffMock, + }); + + // Set up a covered line + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: { '0': { start: { line: 1, column: 0 }, end: { line: 1, column: 0 } } }, + fnMap: {}, + branchMap: {}, + s: { '0': 1 }, + f: {}, + b: {}, + }, + }; + + const coverageFile = path.join(tmpDir, 'coverage-final.json'); + fs.writeFileSync(coverageFile, JSON.stringify(coverageData)); + + await main({ + coverageFile, + artifactDir: path.join(tmpDir, 'artifacts'), + outputFormat: 'json', + }); + + expect(console.log.mock.calls.flat()).toContain('All modified lines are covered by tests.'); + expect(process.exit).not.toHaveBeenCalled(); + }); + it('should detect uncovered functions and report them', async () => { + jest.spyOn(console, 'log').mockImplementation(() => {}); + jest.spyOn(console, 'error').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + + // Mock git functions + const gitFetchMock = jest.fn().mockResolvedValue(); + const gitRawMock = jest.fn().mockResolvedValue('1234567890abcdef\n'); + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('file1.js\n') + .mockResolvedValueOnce('@@ -4,0 +5 @@\n+line5\n'); + + simpleGit.mockReturnValue({ + fetch: gitFetchMock, + raw: gitRawMock, + diff: gitDiffMock, + }); + + // Coverage data with uncovered function + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: {}, + fnMap: { '1': { name: 'uncoveredFunction', loc: { start: { line: 5 }, end: { line: 10 } } } }, + branchMap: {}, + s: {}, + f: { '1': 0 }, // Mark function as uncovered + b: {}, + }, + }; + + const coverageFile = path.join(tmpDir, 'coverage-final.json'); + fs.writeFileSync(coverageFile, JSON.stringify(coverageData)); + + await main({ + coverageFile, + 
artifactDir: path.join(tmpDir, 'artifacts'), + outputFormat: 'json', + }); + + expect(console.log.mock.calls.flat()).toContain('Uncovered lines detected:'); + }); + + it('should detect uncovered branches and report them', async () => { + jest.spyOn(console, 'log').mockImplementation(() => {}); + jest.spyOn(console, 'error').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + + // Mock git functions + const gitFetchMock = jest.fn().mockResolvedValue(); + const gitRawMock = jest.fn().mockResolvedValue('1234567890abcdef\n'); + const gitDiffMock = jest.fn() + .mockResolvedValueOnce('file1.js\n') + .mockResolvedValueOnce('@@ -15,0 +16 @@\n+line16\n'); + + simpleGit.mockReturnValue({ + fetch: gitFetchMock, + raw: gitRawMock, + diff: gitDiffMock, + }); + + // Coverage data with uncovered branch + const normalizedFilePath = path.resolve(process.cwd(), 'file1.js'); + const coverageData = { + [normalizedFilePath]: { + path: normalizedFilePath, + statementMap: {}, + fnMap: {}, + branchMap: { + '2': { + locations: [ + { start: { line: 16 }, end: { line: 20 } }, + ], + }, + }, + s: {}, + f: {}, + b: { '2': [0] }, // Mark branch as uncovered + }, + }; + + const coverageFile = path.join(tmpDir, 'coverage-final.json'); + fs.writeFileSync(coverageFile, JSON.stringify(coverageData)); + + await main({ + coverageFile, + artifactDir: path.join(tmpDir, 'artifacts'), + outputFormat: 'json', + }); + + expect(console.log.mock.calls.flat()).toContain('Uncovered lines detected:'); + }); + }); +}); diff --git a/tools/merge-coverage.js b/tools/merge-coverage.js new file mode 100644 index 0000000000..0708962deb --- /dev/null +++ b/tools/merge-coverage.js @@ -0,0 +1,112 @@ +// src/tools/merge-coverage.js + +const fs = require('fs'); +const path = require('path'); +const { createCoverageMap } = require('istanbul-lib-coverage'); + +const DEFAULT_COVERAGE_DIR = path.resolve(__dirname, '../coverage'); +const DEFAULT_MERGED_COVERAGE_FILE = 
path.join(DEFAULT_COVERAGE_DIR, 'coverage-final.json'); + +/** + * Recursively find all coverage-final.json files within the coverage directory. + * @param {string} dir - Directory to search. + * @returns {string[]} - Array of file paths. + */ +function findCoverageFiles(dir) { + const coverageFiles = []; + + function traverse(currentPath) { + const entries = fs.readdirSync(currentPath, { withFileTypes: true }); + entries.forEach((entry) => { + const fullPath = path.join(currentPath, entry.name); + if (entry.isDirectory()) { + traverse(fullPath); + } else if (entry.isFile() && entry.name === 'coverage-final.json') { + coverageFiles.push(fullPath); + } + }); + } + + traverse(dir); + return coverageFiles; +} + +/** + * Merge multiple coverage-final.json files into a single coverage map. + * @param {string[]} coverageFiles - Array of coverage file paths. + * @returns {Object} - Merged coverage data. + */ +function mergeCoverageFiles(coverageFiles) { + if (coverageFiles.length === 0) { + const errorMessage = 'No coverage-final.json files found to merge.'; + // eslint-disable-next-line no-console + console.error(errorMessage); + throw new Error(errorMessage); + } + + const mergedCoverageMap = createCoverageMap({}); + + coverageFiles.forEach((file) => { + const coverageData = JSON.parse(fs.readFileSync(file, 'utf8')); + mergedCoverageMap.merge(coverageData); + }); + + return mergedCoverageMap.toJSON(); +} + +/** + * Write the merged coverage data to coverage-final.json. + * @param {Object} mergedCoverage - Merged coverage data. + * @param {string} outputFile - The output file path. + */ +function writeMergedCoverage(mergedCoverage, outputFile) { + fs.writeFileSync(outputFile, JSON.stringify(mergedCoverage), 'utf-8'); + // eslint-disable-next-line no-console + console.log(`Merged coverage written to ${outputFile}`); +} + +/** + * Main function to execute the coverage merging process. + * @param {string} coverageDir - Directory to search for coverage files. 
+ * @param {string} mergedCoverageFile - Output file path for the merged coverage. + */ +function main( + coverageDir = DEFAULT_COVERAGE_DIR, + mergedCoverageFile = DEFAULT_MERGED_COVERAGE_FILE +) { + try { + // eslint-disable-next-line no-console + console.log('Searching for coverage-final.json files...'); + const coverageFiles = findCoverageFiles(coverageDir); + // eslint-disable-next-line no-console + console.log('Found coverage files:', coverageFiles); + + // eslint-disable-next-line no-console + console.log('Merging coverage files...'); + const mergedCoverage = mergeCoverageFiles(coverageFiles); + + // eslint-disable-next-line no-console + console.log('Writing merged coverage report...'); + writeMergedCoverage(mergedCoverage, mergedCoverageFile); + + // eslint-disable-next-line no-console + console.log('Coverage merging completed successfully.'); + } catch (error) { + // eslint-disable-next-line no-console + console.error('Error during coverage merging:', error.message); + process.exit(1); + } +} + +if (require.main === module) { + main(); +} + +module.exports = { + findCoverageFiles, + mergeCoverageFiles, + writeMergedCoverage, + main, + DEFAULT_COVERAGE_DIR, + DEFAULT_MERGED_COVERAGE_FILE, +}; diff --git a/tools/merge-coverage.test.js b/tools/merge-coverage.test.js new file mode 100644 index 0000000000..56da10b9e0 --- /dev/null +++ b/tools/merge-coverage.test.js @@ -0,0 +1,237 @@ +// tests/merge-coverage.test.js + +const fs = require('fs'); +const pathModule = require('path'); // Avoid naming conflict with 'path' variable +const os = require('os'); +const { + findCoverageFiles, + mergeCoverageFiles, + writeMergedCoverage, + main, +} = require('./merge-coverage'); + +describe('merge-coverage script', () => { + let tmpDir; + + beforeEach(() => { + // Create a temporary directory for each test + tmpDir = fs.mkdtempSync(pathModule.join(os.tmpdir(), 'merge-coverage-test-')); + }); + + afterEach(() => { + // Clean up the temporary directory after each test + 
fs.rmSync(tmpDir, { recursive: true, force: true }); + jest.restoreAllMocks(); + }); + + describe('findCoverageFiles', () => { + it('should return an empty array when the directory is empty', () => { + const coverageFiles = findCoverageFiles(tmpDir); + expect(coverageFiles).toEqual([]); + }); + + it('should return an empty array when there are no coverage-final.json files', () => { + const subdir = pathModule.join(tmpDir, 'subdir'); + fs.mkdirSync(subdir, { recursive: true }); + fs.writeFileSync(pathModule.join(tmpDir, 'file1.txt'), 'some content'); + fs.writeFileSync(pathModule.join(subdir, 'file2.txt'), 'other content'); + + const coverageFiles = findCoverageFiles(tmpDir); + expect(coverageFiles).toEqual([]); + }); + + it('should return an array with coverage-final.json files', () => { + const subdir = pathModule.join(tmpDir, 'subdir'); + const anotherDir = pathModule.join(tmpDir, 'anotherDir'); + fs.mkdirSync(subdir, { recursive: true }); + fs.mkdirSync(anotherDir, { recursive: true }); + + fs.writeFileSync(pathModule.join(tmpDir, 'coverage-final.json'), '{}'); + fs.writeFileSync(pathModule.join(subdir, 'coverage-final.json'), '{}'); + fs.writeFileSync(pathModule.join(anotherDir, 'not-coverage.json'), '{}'); + + const coverageFiles = findCoverageFiles(tmpDir); + expect(coverageFiles).toEqual([ + pathModule.join(tmpDir, 'coverage-final.json'), + pathModule.join(subdir, 'coverage-final.json'), + ]); + }); + }); + + describe('mergeCoverageFiles', () => { + it('should throw an error when coverageFiles is empty', () => { + expect(() => mergeCoverageFiles([])).toThrow( + 'No coverage-final.json files found to merge.' 
+ ); + }); + + it('should merge coverage files correctly', () => { + const coverageData1 = { + 'file1.js': { + path: 'file1.js', + statementMap: { + '0': { + start: { line: 1, column: 0 }, + end: { line: 1, column: 10 }, + }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 1 }, + f: {}, + b: {}, + }, + }; + const coverageData2 = { + 'file2.js': { + path: 'file2.js', + statementMap: { + '0': { + start: { line: 1, column: 0 }, + end: { line: 1, column: 10 }, + }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 1 }, + f: {}, + b: {}, + }, + }; + + const coverageFile1 = pathModule.join(tmpDir, 'coverage1.json'); + const coverageFile2 = pathModule.join(tmpDir, 'coverage2.json'); + + fs.writeFileSync(coverageFile1, JSON.stringify(coverageData1)); + fs.writeFileSync(coverageFile2, JSON.stringify(coverageData2)); + + const coverageFiles = [coverageFile1, coverageFile2]; + + const mergedCoverage = mergeCoverageFiles(coverageFiles); + + expect(mergedCoverage['file1.js']).toBeDefined(); + expect(mergedCoverage['file2.js']).toBeDefined(); + }); + + it('should throw an error when coverage files contain invalid JSON', () => { + const coverageFile = pathModule.join(tmpDir, 'coverage-invalid.json'); + fs.writeFileSync(coverageFile, 'invalid json'); + + const coverageFiles = [coverageFile]; + + expect(() => mergeCoverageFiles(coverageFiles)).toThrow(); + }); + }); + + describe('writeMergedCoverage', () => { + it('should write the merged coverage data to the specified file', () => { + const mergedCoverage = { + 'file1.js': { /* coverage data */ }, + 'file2.js': { /* coverage data */ }, + }; + + const mergedCoverageFile = pathModule.join(tmpDir, 'coverage-final.json'); + + writeMergedCoverage(mergedCoverage, mergedCoverageFile); + + const writtenData = fs.readFileSync(mergedCoverageFile, 'utf8'); + expect(JSON.parse(writtenData)).toEqual(mergedCoverage); + }); + }); + + describe('main', () => { + beforeEach(() => { + jest.spyOn(console, 'log').mockImplementation(() => {}); + 
jest.spyOn(console, 'error').mockImplementation(() => {}); + jest.spyOn(process, 'exit').mockImplementation(() => {}); + }); + + it('should complete successfully when coverage files are found', () => { + const coverageData1 = { + 'file1.js': { + path: 'file1.js', + statementMap: { + '0': { + start: { line: 1, column: 0 }, + end: { line: 1, column: 10 }, + }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 1 }, + f: {}, + b: {}, + }, + }; + const coverageData2 = { + 'file2.js': { + path: 'file2.js', + statementMap: { + '0': { + start: { line: 1, column: 0 }, + end: { line: 1, column: 10 }, + }, + }, + fnMap: {}, + branchMap: {}, + s: { '0': 1 }, + f: {}, + b: {}, + }, + }; + + const coverageDir1 = pathModule.join(tmpDir, 'dir1'); + const coverageDir2 = pathModule.join(tmpDir, 'dir2'); + + fs.mkdirSync(coverageDir1, { recursive: true }); + fs.mkdirSync(coverageDir2, { recursive: true }); + + fs.writeFileSync( + pathModule.join(coverageDir1, 'coverage-final.json'), + JSON.stringify(coverageData1) + ); + fs.writeFileSync( + pathModule.join(coverageDir2, 'coverage-final.json'), + JSON.stringify(coverageData2) + ); + + const mergedCoverageFile = pathModule.join(tmpDir, 'coverage-final.json'); + + main(tmpDir, mergedCoverageFile); + + expect(console.log).toHaveBeenCalledWith('Searching for coverage-final.json files...'); + expect(console.log).toHaveBeenCalledWith('Found coverage files:', expect.any(Array)); + expect(console.log).toHaveBeenCalledWith('Merging coverage files...'); + expect(console.log).toHaveBeenCalledWith('Writing merged coverage report...'); + expect(console.log).toHaveBeenCalledWith( + `Merged coverage written to ${mergedCoverageFile}` + ); + expect(console.log).toHaveBeenCalledWith( + 'Coverage merging completed successfully.' 
+ ); + expect(process.exit).not.toHaveBeenCalled(); + + // Verify that the merged coverage file was written + const mergedCoverage = JSON.parse( + fs.readFileSync(mergedCoverageFile, 'utf8') + ); + expect(mergedCoverage['file1.js']).toBeDefined(); + expect(mergedCoverage['file2.js']).toBeDefined(); + }); + + it('should exit with error when no coverage files are found', () => { + const mergedCoverageFile = pathModule.join(tmpDir, 'coverage-final.json'); + + main(tmpDir, mergedCoverageFile); + + expect(console.log).toHaveBeenCalledWith('Searching for coverage-final.json files...'); + expect(console.log).toHaveBeenCalledWith('Found coverage files:', []); + expect(console.log).toHaveBeenCalledWith('Merging coverage files...'); + expect(console.error).toHaveBeenCalledWith( + 'Error during coverage merging:', + 'No coverage-final.json files found to merge.' + ); + expect(process.exit).toHaveBeenCalledWith(1); + }); + }); +}); diff --git a/yarn.lock b/yarn.lock index 8ba577ebea..ce6c7ac8a2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6481,6 +6481,11 @@ eslint@^7.20.0: text-table "^0.2.0" v8-compile-cache "^2.0.3" +esm@^3.2.25: + version "3.2.25" + resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" + integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== + esniff@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/esniff/-/esniff-2.0.1.tgz#a4d4b43a5c71c7ec51c51098c1d8a29081f9b308" @@ -8112,6 +8117,11 @@ istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== +istanbul-lib-coverage@^3.2.2: + version "3.2.2" + resolved 
"https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz#2d166c4b0644d43a39f04bf6c2edd1e585f31756" + integrity sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg== + istanbul-lib-instrument@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" @@ -8142,6 +8152,15 @@ istanbul-lib-report@^3.0.0: make-dir "^3.0.0" supports-color "^7.1.0" +istanbul-lib-report@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz#908305bac9a5bd175ac6a74489eafd0fc2445a7d" + integrity sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== + dependencies: + istanbul-lib-coverage "^3.0.0" + make-dir "^4.0.0" + supports-color "^7.1.0" + istanbul-lib-source-maps@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" @@ -9202,6 +9221,13 @@ make-dir@^3.0.0: dependencies: semver "^6.0.0" +make-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== + dependencies: + semver "^7.5.3" + make-error-cause@^2.2.0: version "2.3.0" resolved "https://registry.yarnpkg.com/make-error-cause/-/make-error-cause-2.3.0.tgz#ecd11875971e506d510e93d37796e5b83f46d6f9" @@ -9231,6 +9257,11 @@ mark.js@^8.11.1: resolved "https://registry.yarnpkg.com/mark.js/-/mark.js-8.11.1.tgz#180f1f9ebef8b0e638e4166ad52db879beb2ffc5" integrity sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ== +markdown-table@^3.0.4: + version "3.0.4" + resolved 
"https://registry.yarnpkg.com/markdown-table/-/markdown-table-3.0.4.tgz#fe44d6d410ff9d6f2ea1797a3f60aa4d2b631c2a" + integrity sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw== + marked@^4.0.10, marked@^4.0.15: version "4.1.0" resolved "https://registry.yarnpkg.com/marked/-/marked-4.1.0.tgz#3fc6e7485f21c1ca5d6ec4a39de820e146954796" @@ -9432,6 +9463,11 @@ mobx@^6.3.2: resolved "https://registry.yarnpkg.com/mobx/-/mobx-6.6.2.tgz#9d9102a0e337e3fc09cb2d8ca0c6f00b61270d5a" integrity sha512-IOpS0bf3+hXIhDIy+CmlNMBfFpAbHS0aVHcNC+xH/TFYEKIIVDKNYRh9eKlXuVfJ1iRKAp0cRVmO145CyJAMVQ== +mock-fs@^5.4.1: + version "5.4.1" + resolved "https://registry.yarnpkg.com/mock-fs/-/mock-fs-5.4.1.tgz#b00abc658cb19dbbf282fde2f05bb751cd1e12a5" + integrity sha512-sz/Q8K1gXXXHR+qr0GZg2ysxCRr323kuN10O7CtQjraJsFDJ4SJ+0I5MzALz7aRp9lHk8Cc/YdsT95h9Ka1aFw== + module-details-from-path@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/module-details-from-path/-/module-details-from-path-1.0.3.tgz#114c949673e2a8a35e9d35788527aa37b679da2b"