diff --git a/.github/workflows/copilot.yml b/.github/workflows/copilot.yml deleted file mode 100644 index c365e9ef..00000000 --- a/.github/workflows/copilot.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: Copilot Workflows -on: - push: - branches: - - main - workflow_dispatch: - inputs: - environment: - description: Which AWS Account to use - type: choice - required: true - options: - - test - # Shared workflow consideration - # application: - # description: Application Name - # type: string/choice - # - notification - # required: true - init: - description: Initialise the application? - type: boolean - default: false - service: - description: Service Name - type: string - required: true - default: 'funding-service-design-fund-store' - port: - description: Access port - type: string - default: '80' - type: - description: Type of service to deploy - type: choice - options: - - 'Backend Service' - - 'Load Balanced Web Service' - - 'Request-Driven Web Service' - - 'Scheduled Job' - - 'Worker Service' - default: 'Backend Service' - - -jobs: - deployment: - concurrency: deploy-${{ inputs.environment || 'test' }} # Forces only one workflow at a time can run on the environment - permissions: - id-token: write # This is required for requesting the JWT - contents: read # This is required for actions/checkout - runs-on: ubuntu-latest - environment: ${{ inputs.environment || 'test' }} - steps: - - name: Git clone the repository - uses: actions/checkout@v3 - - - name: Get current date - id: currentdatetime - run: echo "::set-output name=datetime::$(date +'%Y%m%d%H%M%S')" diff --git a/.github/workflows/copilot_deploy.yml b/.github/workflows/copilot_deploy.yml new file mode 100644 index 00000000..db127fc5 --- /dev/null +++ b/.github/workflows/copilot_deploy.yml @@ -0,0 +1,112 @@ +name: Deploy to AWS +on: + workflow_dispatch: + inputs: + environment: + description: Which AWS Account to use + type: choice + required: true + options: + - dev + - test + - uat + - production + run_performance_tests: + required: false + default: false + type: boolean + description: Run performance tests + run_e2e_tests: + required: false + default: true + type: boolean + description: Run e2e tests + push: + # Ignore README markdown + # Only automatically deploy when something in the app or tests folder has changed + paths: + - '!**/README.md' + - 'app/**' + - 'tests/**' + +jobs: + paketo_build: + permissions: + packages: write + uses: communitiesuk/funding-service-design-workflows/.github/workflows/package.yml@main + with: + version_to_build: $(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') + owner: ${{ github.repository_owner }} + application: funding-service-design-fund-store + pre_deploy_tests: + secrets: + E2E_PAT: ${{secrets.E2E_PAT}} + uses: communitiesuk/funding-service-design-workflows/.github/workflows/pre-deploy.yml@main + with: + # Note - no db-name, so defaults to postgres_db + postgres_unit_testing: true + + dev_copilot_deploy: + if: inputs.environment == 'dev' || inputs.environment == '' + needs: [pre_deploy_tests, paketo_build] + concurrency: deploy-dev + secrets: + AWS_ACCOUNT: ${{ secrets.AWS_ACCOUNT }} + uses: ./.github/workflows/environment.yml + permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + with: + workspace: 'dev' + + test_copilot_deploy: + if: inputs.environment == 'test' || inputs.environment == '' + needs: [pre_deploy_tests, paketo_build] + concurrency: deploy-test + secrets: + AWS_ACCOUNT: ${{ secrets.AWS_ACCOUNT }} 
+ uses: ./.github/workflows/environment.yml + permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + with: + workspace: 'test' + + # Allow the capability to override UAT with another branch, but ideally uat and production should be in sync as much as possible + uat_copilot_deploy: + if: inputs.environment == 'uat' || inputs.environment == '' + needs: [pre_deploy_tests, paketo_build] + concurrency: deploy-uat + secrets: + AWS_ACCOUNT: ${{ secrets.AWS_ACCOUNT }} + uses: ./.github/workflows/environment.yml + permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + with: + workspace: 'uat' + + # Only run this if the branch being deployed is main + production_copilot_deploy: + if: (inputs.environment == 'production' || inputs.environment == '') && github.ref == 'refs/heads/main' + needs: [pre_deploy_tests, paketo_build] + concurrency: deploy-production + secrets: + AWS_ACCOUNT: ${{ secrets.AWS_ACCOUNT }} + uses: ./.github/workflows/environment.yml + permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + with: + workspace: 'production' + + # Can we realistically run E2E at this stage, or just plump for application on the grounds it checks fund-store is operational? + post_deploy_tests: + needs: test_copilot_deploy + secrets: + E2E_PAT: ${{secrets.E2E_PAT}} + uses: communitiesuk/funding-service-design-workflows/.github/workflows/post-deploy.yml@main + with: + run_performance_tests: ${{ inputs.run_performance_tests }} + run_e2e_tests: ${{ inputs.run_e2e_tests }} + app_name: application diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 8ecf52ac..0bada2f1 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -3,18 +3,6 @@ name: Deploy fsd-fund-store to Gov PaaS on: workflow_dispatch: inputs: - environment: - description: Which AWS Account to use - type: choice - required: true - options: - - test - - uat - copilot: - description: Whether to deploy to AWS? 
- type: boolean - required: false - default: false deploy_to_dev: required: false default: false @@ -26,7 +14,6 @@ on: jobs: test_and_deploy: - if: ${{github.event.inputs.copilot != 'true'}} uses: communitiesuk/funding-service-design-workflows/.github/workflows/deploy.yml@main with: app_name: ${{ github.event.repository.name }} @@ -42,61 +29,3 @@ jobs: CF_USER: ${{secrets.CF_USERNAME}} CF_PASSWORD: ${{secrets.CF_PASSWORD}} E2E_PAT: ${{secrets.E2E_PAT}} - paketo_build: - permissions: - packages: write - uses: communitiesuk/funding-service-design-workflows/.github/workflows/package.yml@main - with: - version_to_build: $(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,') - owner: ${{ github.repository_owner }} - application: funding-service-design-fund-store - pre_deploy_tests: - if: ${{github.event.inputs.copilot == 'true'}} - secrets: - E2E_PAT: ${{secrets.E2E_PAT}} - uses: communitiesuk/funding-service-design-workflows/.github/workflows/pre-deploy.yml@main - with: - # Note - no db-name, so defaults to postgres_db - postgres_unit_testing: true - copilot_build: - if: ${{github.event.inputs.copilot == 'true'}} - needs: [pre_deploy_tests, paketo_build] - concurrency: deploy-${{ inputs.environment || 'test' }} - permissions: - id-token: write # This is required for requesting the JWT - contents: read # This is required for actions/checkout - runs-on: ubuntu-latest - environment: ${{ inputs.environment || 'test' }} - steps: - - name: Git clone the repository - uses: actions/checkout@v3 - - - name: Get current date - id: currentdatetime - run: echo "datetime=$(date +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT - - - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT }}:role/GithubCopilotDeploy - role-session-name: FUNDSTORE_COPILOT_${{ steps.currentdatetime.outputs.datetime }} - aws-region: eu-west-2 - - - name: Install AWS Copilot CLI - run: | - curl -Lo aws-copilot https://github.com/aws/copilot-cli/releases/latest/download/copilot-linux && chmod +x aws-copilot && sudo mv aws-copilot /usr/local/bin/copilot - - - name: Inject Git SHA into manifest - run: | - yq -i '.variables.GITHUB_SHA = "${{ github.sha }}"' copilot/fsd-fund-store/manifest.yml - - - name: Inject replacement image into manifest - run: | - yq -i '.image.location = "ghcr.io/communitiesuk/funding-service-design-fund-store:${{ github.ref_name == 'main' && 'latest' || github.ref_name }}"' copilot/fsd-fund-store/manifest.yml - - - name: Run database migrations - run: scripts/migration-task-script.py ${{ inputs.environment || 'test' }} 'fsd-fund-store' - - - name: Copilot deploy - run: | - copilot svc deploy --env ${{ inputs.environment || 'test' }} diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml new file mode 100644 index 00000000..62c021a7 --- /dev/null +++ b/.github/workflows/environment.yml @@ -0,0 +1,49 @@ +name: Environment Deployment +on: + workflow_call: + inputs: + workspace: + required: true + type: string + secrets: + AWS_ACCOUNT: + required: true + +jobs: + copilot_deploy: + permissions: + id-token: write # This is required for requesting the JWT + contents: read # This is required for actions/checkout + runs-on: ubuntu-latest + environment: ${{ inputs.workspace }} + steps: + - name: Git clone the repository + uses: actions/checkout@v3 + + - name: Get current date + id: currentdatetime + run: echo "datetime=$(date +'%Y%m%d%H%M%S')" >> $GITHUB_OUTPUT + + - name: configure aws credentials + uses: 
aws-actions/configure-aws-credentials@v2 + with: + role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT }}:role/GithubCopilotDeploy + role-session-name: FUND_STORE_${{ inputs.workspace }}_COPILOT_${{ steps.currentdatetime.outputs.datetime }} + aws-region: eu-west-2 + + - name: Install AWS Copilot CLI + run: | + curl -Lo aws-copilot https://github.com/aws/copilot-cli/releases/latest/download/copilot-linux && chmod +x aws-copilot && sudo mv aws-copilot /usr/local/bin/copilot + + - name: Inject Git SHA into manifest + run: | + yq -i '.variables.GITHUB_SHA = "${{ github.sha }}"' copilot/fsd-fund-store/manifest.yml + + - name: Inject replacement image into manifest + run: | + yq -i '.image.location = "ghcr.io/communitiesuk/funding-service-design-fund-store:${{ github.ref_name == 'main' && 'latest' || github.ref_name }}"' copilot/fsd-fund-store/manifest.yml + + - name: Copilot ${{ inputs.workspace }} deploy + id: deploy_build + run: | + copilot svc deploy --env ${{ inputs.workspace }} diff --git a/.github/workflows/manual-dev-deploy.yml b/.github/workflows/manual-dev-deploy.yml index 35b29834..0f9b6583 100644 --- a/.github/workflows/manual-dev-deploy.yml +++ b/.github/workflows/manual-dev-deploy.yml @@ -20,7 +20,7 @@ - name: install dependencies run: source .venv/bin/activate && python -m pip install --upgrade pip && pip install -r requirements.txt - name: download previous build - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 - name: Deploy to Gov PaaS uses: citizen-of-planet-earth/cf-cli-action@v2 with: diff --git a/README.md b/README.md index 92de6866..fcbb4ff5 100644 --- a/README.md +++ b/README.md @@ -18,63 +18,78 @@ Clone the repository. ### Create a Virtual environment +```bash python3 -m venv .venv +``` ### Enter the virtual environment ...either macOS using bash: +```bash source .venv/bin/activate +``` ...or if on Windows using Command Prompt: +```bash .venv\Scripts\activate.bat +``` ### Install dependencies From the top-level directory enter the command to install pip and the dependencies of the project +```bash python3 -m pip install --upgrade pip && pip install -r requirements-dev.txt - +``` NOTE: requirements-dev.txt and requirements.txt are updated using [pip-tools pip-compile](https://github.com/jazzband/pip-tools) To update requirements please manually add the dependencies in the .in files (not the requirements.txt files) Then run: +```bash pip-compile requirements.in pip-compile requirements-dev.in +``` ## How to use Enter the virtual environment as described above, then: +```bash flask run - +``` ### Run with Gunicorn In deployed environments the service is run with gunicorn. You can run the service locally with gunicorn to test First set the FLASK_ENV environment you wish to test eg: +```bash export FLASK_ENV=dev - +``` Then run gunicorn using the following command: +```bash gunicorn wsgi:app -c run/gunicorn/local.py - +``` ### Setting up for database development This service is designed to use PostgreSQL as a database, via SqlAlchemy When running the service (eg. `flask run`) you need to set the DATABASE_URL environment variable to the URL of the database you want to test with. Initialise the database: - +```bash flask db init +``` Then run existing migrations: - +```bash flask db upgrade +``` Whenever you make changes to database models, please run: - +```bash flask db migrate +``` This will create the migration files for your changes in /db/migrations. 
Please then commit and push these to github so that the migrations will be run in the pipelines to correctly @@ -83,58 +98,66 @@ upgrade the deployed db instances with your changes. # Database on Paas Create db service with: +```bash cf create-service postgres medium-13 fund-store-dev-db +``` Ensure the following elements are present in your `manifest.yml`. The `run_migrations_paas.py` is what initialises the database, and the `services` element binds the application to the database service. +```yaml command: scripts/run_migrations_paas.py && gunicorn wsgi:app -c run/gunicorn/devtest.py services: - fund-store-dev-db +``` # Seeding Fund Data To seed fund & round data to db for a specific fund-round (example): -``` -docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.fund_round_loaders.load_cof_r2 +```bash + docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.fund_round_loaders.load_cof_r2 ``` To seed all fund-round data to db: -``` -docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.load_all_fund_rounds +```bash + docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.load_all_fund_rounds ``` To load on an environment via cloudfoundry (modify appropriately): ```bash -cf run-task funding-service-design-fund-store[-dev|-test] --command "python -m scripts.load_all_fund_rounds" + cf run-task funding-service-design-fund-store[-dev|-test] --command "python -m scripts.load_all_fund_rounds" ``` To amend the round dates -``` -docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.amend_round_dates --round_id c603d114-5364-4474-a0c4-c41cbf4d3bbd --deadline_date "2023-03-30 12:00:00" -``` -``` -docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.amend_round_dates --round_id c603d114-5364-4474-a0c4-c41cbf4d3bbd --opens_date "2022-10-04 12:00:00" --deadline_date "2022-12-14 11:59:00" --assessment_deadline_date "2023-03-30 12:00:00" +```bash + docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.amend_round_dates --round_id c603d114-5364-4474-a0c4-c41cbf4d3bbd --deadline_date "2023-03-30 12:00:00" + + docker exec -ti $(docker ps -qf "name=fund-store") python -m scripts.amend_round_dates --round_id c603d114-5364-4474-a0c4-c41cbf4d3bbd --opens_date "2022-10-04 12:00:00" --deadline_date "2022-12-14 11:59:00" --assessment_deadline_date "2023-03-30 12:00:00" ``` + To truncate data before re-loading it run +```bash docker exec -it $(docker ps -qf "name=fund-store") inv truncate-data +``` ### Create and seed local DB - Make sure your local `DATABASE_URL` env var is set to your local postgres db (this doesn't need to actually exist yet), eg: - ``` - # pragma: allowlist nextline secret - DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/fsd_fund_store - ``` +```bash + # pragma: allowlist nextline secret + DATABASE_URL=postgresql://postgres:postgres@127.0.0.1:5432/fsd_fund_store +``` - Create and seed using the following scripts: +```bash python -m scripts.fund_round_loaders.{load_config_script} +``` ### Build with Paketo @@ -142,30 +165,31 @@ To truncate data before re-loading it run [Paketo buildpacks](https://paketo.io/) -```pack build --builder paketobuildpacks/builder:base``` +```bash + pack build --builder paketobuildpacks/builder:base +``` Example: -``` -[~/work/repos/funding-service-design-fund-store] pack build paketo-demofsd-app --builder paketobuildpacks/builder:base -*** -Successfully built image paketo-demofsd-app -``` + [~/work/repos/funding-service-design-fund-store] pack build 
paketo-demofsd-app --builder paketobuildpacks/builder:base + *** + Successfully built image paketo-demofsd-app + You can then use that image with docker to run a container -``` -docker run -d -p 8080:8080 --env PORT=8080 --env FLASK_ENV=dev [envs] paketo-demofsd-app +```bash + docker run -d -p 8080:8080 --env PORT=8080 --env FLASK_ENV=dev [envs] paketo-demofsd-app ``` `envs` needs to include values for each of: SENTRY_DSN GITHUB_SHA -``` -docker ps -a -CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES -42633142c619 paketo-demofsd-app "/cnb/process/web" 8 seconds ago Up 7 seconds 0.0.0.0:8080->8080/tcp peaceful_knuth +```bash + docker ps -a + CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES + 42633142c619 paketo-demofsd-app "/cnb/process/web" 8 seconds ago Up 7 seconds 0.0.0.0:8080->8080/tcp peaceful_knuth ``` # Pipelines @@ -180,17 +204,20 @@ Place brief descriptions of Pipelines here ## Unit To run all tests in a development environment run: - +```bash pytest +``` # Extras This repo comes with a .pre-commit-config.yaml, if you wish to use this do the following while in your virtual environment: +```bash pip install pre-commit black pre-commit install +``` Once the above is done you will have autoformatting and pep8 compliance built into your workflow. You will be notified of any pep8 errors during commits. @@ -206,7 +233,7 @@ For each AWS account, these commands will need to be run _once_ to initialise the `copilot app init pre-award` - this links the pre-award app with the current service, and associates the next commands with the service. Essentially, this provides context for the service to run under -``` +```bash copilot init \ --name fsd-fund-store \ --app pre-award \ diff --git a/api/routes.py b/api/routes.py index c71f67f1..af1fa5ec 100644 --- a/api/routes.py +++ b/api/routes.py @@ -149,6 +149,7 @@ def get_available_flag_allocations(fund_id, round_id): from config.fund_loader_config.cof.cof_r2 import COF_ROUND_2_WINDOW_3_ID from config.fund_loader_config.night_shelter.ns_r2 import NIGHT_SHELTER_ROUND_2_ID from config.fund_loader_config.night_shelter.ns_r2 import NIGHT_SHELTER_FUND_ID + from config.fund_loader_config.cyp.cyp_r1 import CYP_FUND_ID, CYP_ROUND_1_ID cof_teams = [ {"key": "ASSESSOR", "value": "Assessor"}, @@ -166,7 +167,12 @@ def get_available_flag_allocations(fund_id, round_id): {"key": "RS_ADVISORS", "value": "RS Advisors"}, ] - if fund_id == COF_FUND_ID and round_id == COF_ROUND_2_WINDOW_2_ID: + cyp_teams = [ + {"key": "COMMERCIAL_ASSESSOR", "value": "Commercial Assessor"}, + {"key": "LEAD_ASSESSOR", "value": "Lead Assessor"}, + ] + + if fund_id == COF_FUND_ID and round_id == COF_ROUND_2_WINDOW_2_ID: return cof_teams elif fund_id == COF_FUND_ID and round_id == COF_ROUND_2_WINDOW_3_ID: return cof_teams @@ -176,5 +182,7 @@ return cof_teams elif fund_id == NIGHT_SHELTER_FUND_ID and round_id == NIGHT_SHELTER_ROUND_2_ID: return nstf_teams + elif fund_id == CYP_FUND_ID and round_id == CYP_ROUND_1_ID: + return cyp_teams else: abort(404) diff --git a/config/fund_loader_config/cyp/cyp_r1.py b/config/fund_loader_config/cyp/cyp_r1.py index 89222e7e..4806d0aa 100644 --- a/config/fund_loader_config/cyp/cyp_r1.py +++ b/config/fund_loader_config/cyp/cyp_r1.py @@ -171,7 +171,12 @@ "feedback_link": "", "project_name_field_id": "bsUoNG", "application_guidance": CYP_APPLICATION_GUIDANCE, - "guidance_url": "", # todo, fill in once we have, and re-run import script.
+ "guidance_url": ( + "https://www.gov.uk/government/publications/" + "the-children-and-young-peoples-resettlement-" + "fund-prospectus/the-children-and-young-peoples-" + "resettlement-fund-prospectus#scoring-criteria" + ), "all_uploaded_documents_section_available": False, "application_fields_download_available": False, "display_logo_on_pdf_exports": False, diff --git a/copilot/fsd-fund-store/addons/fsd-fund-store-cluster.yml b/copilot/fsd-fund-store/addons/fsd-fund-store-cluster.yml index 068b4516..bcfa5791 100644 --- a/copilot/fsd-fund-store/addons/fsd-fund-store-cluster.yml +++ b/copilot/fsd-fund-store/addons/fsd-fund-store-cluster.yml @@ -21,8 +21,12 @@ Mappings: "DBMinCapacity": 0.5 # AllowedValues: from 0.5 through 128 "DBMaxCapacity": 8 # AllowedValues: from 0.5 through 128 BastionMap: + dev: + "SecurityGroup": "sg-0b6c7aabb95bf14a9" test: "SecurityGroup": "sg-0cf75a004dbade7b8" + uat: + "SecurityGroup": "sg-04017abfef2079894" Resources: fsdfundstoreclusterDBSubnetGroup: diff --git a/scripts/all_questions/metadata_utils.py b/scripts/all_questions/metadata_utils.py index 8a7cea39..7461829a 100644 --- a/scripts/all_questions/metadata_utils.py +++ b/scripts/all_questions/metadata_utils.py @@ -14,6 +14,8 @@ from scripts.all_questions.read_forms import remove_lowest_in_hierarchy from scripts.all_questions.read_forms import strip_leading_numbers +FIELD_TYPES_WITH_MAX_WORDS = ["freetextfield", "multilinetextfield"] + def get_all_child_nexts(page: dict, child_nexts: list, all_pages: dict): """Recursively builds a list of everything that could come next from this page, @@ -298,7 +300,7 @@ def determine_title_and_text_for_component( child_title, child_text = determine_title_and_text_for_component( child, include_html_components, form_lists, is_child=True ) - if child["type"].casefold() == "multilinetextfield": + if child["type"].casefold() in FIELD_TYPES_WITH_MAX_WORDS: first_column_title = component["options"]["columnTitles"][0].casefold() text.append( f"{child_title} (Max {child['options']['maxWords']} words per" @@ -325,8 +327,9 @@ def determine_title_and_text_for_component( text = [] extract_from_html(soup, text) update_wording_for_multi_input_fields(text) - if component["type"].casefold() == "multilinetextfield" and not is_child: - text.append(f"(Max {component['options']['maxWords']} words)") + + if component["type"].casefold() in FIELD_TYPES_WITH_MAX_WORDS and not is_child: + text.append(f"(Max {component['options']['maxWords']} words)") if "list" in component: # include available options for lists @@ -406,6 +409,7 @@ def build_components_from_page( condition_value, list_name=c["list"] if "list" in c else None, form_lists=form_lists, + lang=lang, ) text.append( f"If '{condition_text}', go to {destination}" diff --git a/scripts/data_updates/patch_cypr1_guidance_201023.py b/scripts/data_updates/patch_cypr1_guidance_201023.py new file mode 100644 index 00000000..c6e17154 --- /dev/null +++ b/scripts/data_updates/patch_cypr1_guidance_201023.py @@ -0,0 +1,33 @@ +import config.fund_loader_config.cyp.cyp_r1 as cyp_r1 +from db import db +from db.models.round import Round +from flask import current_app +from sqlalchemy import update + + +def update_round_guidance(round_config): + current_app.logger.info( + f"Round: {round_config['short_name']}, id: {round_config['id']}" + ) + current_app.logger.info("\t\tUpdating round guidance") + stmt = ( + update(Round) + .where(Round.id == round_config["id"]) + .values(guidance_url=round_config["guidance_url"]) + ) + + db.session.execute(stmt) + 
db.session.commit() + + +def main() -> None: + current_app.logger.info("Updating guidance url for CYP R1") + update_round_guidance(cyp_r1.round_config[0]) + current_app.logger.info("Updates complete") + + +if __name__ == "__main__": + from app import app + + with app.app_context(): + main() diff --git a/tests/test_generate_all_questions.py b/tests/test_generate_all_questions.py index a720e72f..f6fabd35 100644 --- a/tests/test_generate_all_questions.py +++ b/tests/test_generate_all_questions.py @@ -404,7 +404,8 @@ def test_build_components_bullets_in_hint(): ) components = build_components_from_page(page_json, include_html_components=False) assert len(components) == 1 - assert len(components[0]["text"]) == 2 + assert len(components[0]["text"]) == 3 + assert components[0]["text"][2] == "(Max 250 words)" assert len(components[0]["text"][1]) == 3
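A note on applying the new data-update script: `scripts/data_updates/patch_cypr1_guidance_201023.py` runs its `main()` inside the Flask app context, so it can be invoked as a module in the same way as the existing seeding scripts described in the README. The command below is a sketch only: it reuses the repo's existing `docker exec` pattern and assumes a locally running container that matches the `name=fund-store` filter.

```bash
# Hypothetical local run of the CYP R1 guidance-url patch,
# mirroring the docker exec pattern used for the fund/round loader scripts.
docker exec -ti $(docker ps -qf "name=fund-store") \
  python -m scripts.data_updates.patch_cypr1_guidance_201023
```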