Merge pull request #101 from uktrade/feature/resolve-celery
Feature/resolve celery
hareshkainthdbt authored Dec 10, 2024
2 parents 34398bb + a907c81 commit 898e753
Showing 72 changed files with 914 additions and 518 deletions.
4 changes: 2 additions & 2 deletions .copilot/image_build_run.sh
@@ -6,6 +6,6 @@ set -e
# Add commands below to run inside the container after all the other buildpacks have been applied
export BUILD_STEP='True'
export COPILOT_ENVIRONMENT_NAME='build'
export DJANGO_SETTINGS_MODULE="config.settings.base"
export DJANGO_SETTINGS_MODULE="fbr.settings"

poetry run python fbr/manage.py collectstatic --noinput
poetry run python manage.py collectstatic --noinput
4 changes: 2 additions & 2 deletions .github/workflows/code_quality.yml
@@ -65,9 +65,9 @@ jobs:
run: |
npm install
npm run build
DJANGO_SETTINGS_MODULE=config.settings.local poetry run fbr/manage.py collectstatic --noinput
DJANGO_SETTINGS_MODULE=fbr.settings poetry run python manage.py collectstatic --noinput
# poetry run fbr/manage.py makemigrations --check --dry-run
# poetry run manage.py makemigrations --check --dry-run

# - name: Run tests
# run: poetry run pytest fbr/tests
4 changes: 2 additions & 2 deletions .secrets.baseline
@@ -133,9 +133,9 @@
"filename": "Makefile",
"hashed_secret": "afc848c316af1a89d49826c5ae9d00ed769415f3",
"is_verified": false,
"line_number": 102
"line_number": 65
}
]
},
"generated_at": "2024-09-09T12:08:54Z"
"generated_at": "2024-12-10T03:50:08Z"
}
19 changes: 13 additions & 6 deletions local_deployment/Dockerfile → Dockerfile
@@ -25,18 +25,25 @@ ENV PYTHONUNBUFFERED=1 \
PYTHONDONTWRITEBYTECODE=1 \
DEBUG=1 \
DJANGO_ADMIN=1 \
DJANGO_SETTINGS_MODULE=config.settings.local
DJANGO_SETTINGS_MODULE=fbr.settings

# Install nodejs
RUN apt install -y curl && \
curl -sL https://deb.nodesource.com/setup_20.x | bash - && \
apt install -y nodejs

WORKDIR /app
COPY . /app

# Install poetry and project dependencies
RUN pip install poetry==1.8.3 && \
poetry install --without dev
RUN pip install poetry

# Copy only the requirements.txt into the container
COPY requirements.txt /app/

# Install the dependencies specified in requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY . /app

CMD ["local_deployment/entry.sh"]
COPY entry.sh /entry.sh
RUN chmod +x /entry.sh
ENTRYPOINT ["/entry.sh"]
35 changes: 22 additions & 13 deletions Makefile
@@ -34,14 +34,14 @@ drop-database: # Delete project's postgres database
fi

build: # Build docker containers for local execution
docker build --no-cache -f local_deployment/Dockerfile -t local_deployment .
docker build --no-cache -f Dockerfile -t local_deployment .
docker compose build

collectstatic: # Run Django collectstatic
docker compose run --rm web poetry run python fbr/manage.py collectstatic --noinput
docker compose run --rm web poetry run python manage.py collectstatic --noinput

admin: # Create a superuser
docker compose exec web poetry run python fbr/manage.py createsuperuser --username admin --email admin@localhost
docker compose exec web poetry run python manage.py createsuperuser --username admin --email admin@localhost

first-use: # Initialise for local execution
@echo "$(COLOUR_GREEN)Preparing for first use$(COLOUR_NONE)"
@@ -62,6 +62,7 @@ first-use: # Initialise for local execution
@echo "$(COLOUR_GREEN)Destroy containers with 'make down'$(COLOUR_NONE)"

up: # Build, (re)create and start containers
export DATABASE_URL=postgres://postgres:postgres@localhost:5432/fbr
docker compose up -d
@echo "$(COLOUR_GREEN)Services are up - use 'make logs' to view service logs$(COLOUR_NONE)"

@@ -90,27 +91,29 @@ logs: # View container logs
docker compose logs -f -t

test: # Run tests
pytest fbr/tests --cov-report term
pytest app/tests --cov-report term

bdd: # Run BDD tests
HEADLESS_MODE=false SLOW_MO_MS=500 behave ./fbr/tests/bdd/features/ --tags=LOCAL
HEADLESS_MODE=false SLOW_MO_MS=500 behave ./app/tests/bdd/features/ --tags=LOCAL

django-shell: # Run a Django shell (on container)
docker compose run web poetry run python fbr/manage.py shell
docker compose run web poetry run python manage.py shell

django-shell-local: # Run a Django shell (local django instance)
DATABASE_URL=postgres://postgres:postgres@localhost:5432/fbr \
DEBUG=True \
DJANGO_ADMIN=False \
DJANGO_SECRET_KEY=walls-have-ears \
DJANGO_SETTINGS_MODULE=config.settings.local \
poetry run python fbr/manage.py shell
DJANGO_SETTINGS_MODULE=fbr.settings \
poetry run python manage.py shell

migrate: # Run Django migrate
docker compose run --rm web poetry run python fbr/manage.py migrate --noinput
export DATABASE_URL=postgres://postgres:postgres@localhost:5432/fbr && \
python manage.py migrate

migrations: # Run Django makemigrations
docker compose run --rm web poetry run python fbr/manage.py makemigrations --noinput
export DATABASE_URL=postgres://postgres:postgres@localhost:5432/fbr && \
python manage.py makemigrations

lint: # Run all linting
make black
@@ -126,8 +129,8 @@ secrets-baseline: # Generate a new secrets baseline file
poetry run detect-secrets scan > .secrets.baseline

rebuild_cache:
export PYTHONPATH=./fbr && \
export DJANGO_SETTINGS_MODULE='fbr.config.settings.local' && \
export PYTHONPATH=. && \
export DJANGO_SETTINGS_MODULE='fbr.settings' && \
export DATABASE_URL=postgres://postgres:postgres@localhost:5432/fbr && \
poetry install && \
poetry run rebuild-cache
@@ -137,5 +140,11 @@ setup_local: # Set up the local environment
$(MAKE) first-use
$(MAKE) start
$(MAKE) migrate
$(MAKE) rebuild_cache
@echo "$(COLOUR_GREEN)Local setup complete.$(COLOUR_NONE)"

setup_local_force_rebuild:
@echo "$(COLOUR_GREEN)Will run initial setup, followed by cache rebuild for local environment...$(COLOUR_NONE)"
$(MAKE) setup_local
@echo "$(COLOUR_GREEN)Manual cache rebuild (not using Celery task)...$(COLOUR_NONE)"
$(MAKE) rebuild_cache
@echo "$(COLOUR_GREEN)Cache rebuilt complete.$(COLOUR_NONE)"
5 changes: 3 additions & 2 deletions Procfile
@@ -1,3 +1,4 @@
web: bash paas_entrypoint.sh
celery-worker: celery --app fbr.config.celery worker --task-events --loglevel INFO
celery-beat: celery --app fbr.config.celery beat --loglevel INFO
celery-worker: celery --app fbr.celery_app worker --task-events --loglevel INFO
celery-beat: celery --app fbr.celery_app beat --loglevel INFO
check: python manage.py check
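
The worker and beat entries now point at fbr.celery_app rather than fbr.config.celery. That module is not part of the hunks shown here, so the following is only a minimal sketch of what a standard Celery-for-Django app module of that name would contain, assuming the conventional layout implied by the "--app fbr.celery_app" flag above.

# Hypothetical sketch of fbr/celery_app.py; the real module is not shown in this diff.
import os

from celery import Celery

# Point Celery at the project's settings module before creating the app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fbr.settings")

celery_app = Celery("fbr")
# Read CELERY_* options from Django settings and discover tasks in installed apps.
celery_app.config_from_object("django.conf:settings", namespace="CELERY")
celery_app.autodiscover_tasks()

The --app option only needs the module to expose a Celery instance, so the attribute name used here (celery_app) is an assumption.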
File renamed without changes.
File renamed without changes.
File renamed without changes.
18 changes: 13 additions & 5 deletions fbr/cache/legislation.py → app/cache/legislation.py
@@ -4,16 +4,17 @@

import logging
import re
import time
import xml.etree.ElementTree as ET # nosec BXXX

from typing import Optional

import requests # type: ignore

from fbr.cache.construction_legislation import construction_legislation_dataframe
from fbr.search.config import SearchDocumentConfig
from fbr.search.utils.date import convert_date_string_to_obj
from fbr.search.utils.documents import ( # noqa: E501
from app.cache.construction_legislation import construction_legislation_dataframe
from app.search.config import SearchDocumentConfig
from app.search.utils.date import convert_date_string_to_obj
from app.search.utils.documents import ( # noqa: E501
generate_short_uuid,
insert_or_update_document,
)
@@ -123,6 +124,8 @@ def build_cache(self, config: SearchDocumentConfig):
logger.info("building legislation cache...")
dataset = construction_legislation_dataframe()

failed_url_fetches = []

# For each row, get the URL from the column named
# 'URI to Extract XML Data'
# and store the XML data in a list
@@ -185,10 +188,15 @@ def build_cache(self, config: SearchDocumentConfig):

# Insert or update the document
insert_or_update_document(document_json)

# # Sleep for a short time to avoid rate limiting
# time.sleep(0.5)
except Exception as e:
logger.error(f"error fetching data from {url}: {e}")
raise e
failed_url_fetches.append(url)

if failed_url_fetches:
logger.warning(f"failed to fetch data {len(failed_url_fetches)} legislation sources: {failed_url_fetches}")
def _to_json(
self,
description,
15 changes: 8 additions & 7 deletions fbr/cache/manage_cache.py → app/cache/manage_cache.py
@@ -3,23 +3,24 @@

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
django.setup()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fbr.settings")

# Initialize Django setup
django.setup()

import time

from fbr.cache.legislation import Legislation
from fbr.cache.public_gateway import PublicGateway
from fbr.search.config import SearchDocumentConfig
from fbr.search.utils.documents import clear_all_documents
from app.cache.legislation import Legislation
from app.cache.public_gateway import PublicGateway
from app.search.config import SearchDocumentConfig
from app.search.utils.documents import clear_all_documents


def rebuild_cache():
try:
start = time.time()
clear_all_documents()
config = SearchDocumentConfig(search_query="", timeout=20)
config = SearchDocumentConfig(search_query="", timeout=3)
Legislation().build_cache(config)
PublicGateway().build_cache(config)
end = time.time()
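
The Makefile above now calls rebuild_cache directly and labels it a "manual cache rebuild (not using Celery task)", which implies a Celery task elsewhere does the same work on a schedule via the celery-beat process in the Procfile. A purely illustrative sketch of such a task follows; the module path, task name and schedule are assumptions, not taken from this diff.

# Hypothetical sketch only: a Celery task wrapping rebuild_cache() so that
# celery-beat can trigger it periodically. Not taken from this diff.
from celery import shared_task

from app.cache.manage_cache import rebuild_cache


@shared_task(name="app.cache.rebuild_cache")
def rebuild_cache_task():
    """Rebuild the search cache in the background via the Celery worker."""
    rebuild_cache()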
4 changes: 2 additions & 2 deletions fbr/cache/public_gateway.py → app/cache/public_gateway.py
@@ -4,8 +4,8 @@

import requests # type: ignore

from fbr.search.utils.date import convert_date_string_to_obj
from fbr.search.utils.documents import ( # noqa: E501
from app.search.utils.date import convert_date_string_to_obj
from app.search.utils.documents import ( # noqa: E501
generate_short_uuid,
insert_or_update_document,
)
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion fbr/core/apps.py → app/core/apps.py
@@ -2,6 +2,6 @@


class CoreConfig(AppConfig):
name = "core"
name = "app.core"
verbose_name = "Find business regulations core application functionality"
default_auto_field = "django.db.models.BigAutoField"
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion fbr/search/apps.py → app/search/apps.py
@@ -13,6 +13,6 @@ class SearchConfig(AppConfig):
"""

name = "search"
name = "app.search"
verbose_name = "Find business regulations application functionality"
default_auto_field = "django.db.models.BigAutoField"
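
Because the core and search apps now live under the app package, each AppConfig name becomes a dotted path, and the settings module has to register the apps the same way. The snippet below is only an illustration of how INSTALLED_APPS in fbr/settings.py would reference them; the settings file itself is not included in this excerpt.

# Illustrative only: how fbr/settings.py might list the renamed apps.
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    # ...
    "app.core",
    "app.search",
]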
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
24 changes: 12 additions & 12 deletions fbr/search/tests/test_search.py → app/search/tests/test_search.py
@@ -2,18 +2,18 @@

from unittest.mock import MagicMock, call, patch

from search.utils.search import create_search_query
from app.search.utils.search import create_search_query


class TestCreateSearchQuery(unittest.TestCase):

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_single_word_query(self, mock_search_query):
result = create_search_query("test")
mock_search_query.assert_called_with("test", search_type="plain")
self.assertEqual(result, mock_search_query.return_value)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_implicit_and_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
@@ -35,7 +35,7 @@ def test_implicit_and_search_operator_query(self, mock_search_query):
# Assert the AND operation was applied
mock_query1.__and__.assert_called_once_with(mock_query2)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_multiple_implicit_and_search_operator_query(
self, mock_search_query
):
@@ -61,7 +61,7 @@ def test_multiple_implicit_and_search_operator_query(
# Assert the AND operation was applied
mock_query1.__and__.assert_called_with(mock_query3)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_and_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
@@ -83,7 +83,7 @@ def test_and_search_operator_query(self, mock_search_query):
# Assert the AND operation was applied
mock_query1.__and__.assert_called_once_with(mock_query2)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_multiple_and_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
@@ -107,7 +107,7 @@ def test_multiple_and_search_operator_query(self, mock_search_query):
# Assert the AND operation was applied
mock_query1.__and__.assert_called_with(mock_query3)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_or_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
@@ -129,7 +129,7 @@ def test_or_search_operator_query(self, mock_search_query):
# Assert the AND operation was applied
mock_query1.__or__.assert_called_once_with(mock_query2)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_multple_or_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
@@ -153,7 +153,7 @@ def test_multple_or_search_operator_query(self, mock_search_query):
# Assert the AND operation was applied
mock_query1.__or__.assert_called_with(mock_query3)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_multiple_or_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
@@ -177,15 +177,15 @@ def test_multiple_or_search_operator_query(self, mock_search_query):
# Assert the AND operation was applied
mock_query1.__or__.assert_called_with(mock_query3)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_phrase_search_query(self, mock_search_query):
result = create_search_query('"test trial"')
mock_search_query.assert_called_with(
"test trial", search_type="phrase"
)
self.assertEqual(result, mock_search_query.return_value)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_and_multiple_single_single_phrase_search_query(
self, mock_search_query
):
@@ -221,7 +221,7 @@ def test_and_multiple_single_single_phrase_search_query(
# Assert the AND operation was applied
mock_query1.__and__.assert_called_with(mock_query5)

@patch("search.utils.search.SearchQuery", autospec=True)
@patch("app.search.utils.search.SearchQuery", autospec=True)
def test_single_or_and_search_operator_query(self, mock_search_query):
# Mock SearchQuery instances
mock_query1 = MagicMock(name="MockQuery1")
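
The patch targets above imply that create_search_query lives in app.search.utils.search and combines plain words, quoted phrases and AND/OR operators into a single Django SearchQuery. The sketch below is inferred from those tests only; it is not the project's actual implementation.

# Inferred sketch of app/search/utils/search.create_search_query, based solely
# on the tests above; NOT the project's real code.
import re

from django.contrib.postgres.search import SearchQuery


def create_search_query(query_string):
    """Combine words, quoted phrases and AND/OR operators into one SearchQuery."""
    tokens = re.findall(r'"[^"]+"|\S+', query_string)
    combined = None
    operator = "&"  # the implicit operator between terms is AND
    for token in tokens:
        if token.upper() in ("AND", "OR"):
            operator = "&" if token.upper() == "AND" else "|"
            continue
        if token.startswith('"') and token.endswith('"'):
            part = SearchQuery(token.strip('"'), search_type="phrase")
        else:
            part = SearchQuery(token, search_type="plain")
        combined = (
            part
            if combined is None
            else (combined & part if operator == "&" else combined | part)
        )
        operator = "&"  # reset to implicit AND after each term
    return combined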
Empty file added app/search/utils/__init__.py
Empty file.
File renamed without changes.
@@ -6,7 +6,7 @@

from django.db.models import QuerySet

from search.models import DataResponseModel, logger
from app.search.models import DataResponseModel, logger


def clear_all_documents():
@@ -4,7 +4,7 @@
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db.models import QuerySet

from search.config import SearchDocumentConfig
from app.search.config import SearchDocumentConfig

logger = logging.getLogger(__name__)
