Skip to content

Commit

Permalink
chore: minor refactoring and removed unused endpoint
Browse files Browse the repository at this point in the history
  • Loading branch information
hareshkainthdbt committed Dec 1, 2024
1 parent 1ebf0f0 commit 07ac8e1
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 37 deletions.
2 changes: 1 addition & 1 deletion fbr/cache/public_gateway.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ def build_cache(self, config):
row["date_valid"] = convert_date_string_to_obj(
row.get("date_valid")
)
row["id"] = (generate_short_uuid(),)
row["id"] = generate_short_uuid()

row["publisher_id"] = (
None
Expand Down
36 changes: 0 additions & 36 deletions fbr/config/urls.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
"""Find business regulations URL configuration."""

import logging
import time

from rest_framework import routers, serializers, status, viewsets
from rest_framework.decorators import action
Expand All @@ -14,9 +13,7 @@
import core.views as core_views
import search.views as search_views

from search.config import SearchDocumentConfig
from search.models import DataResponseModel
from search.utils.documents import clear_all_documents
from search.utils.search import get_publisher_names, search

urls_logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -91,38 +88,6 @@ def search(self, request, *args, **kwargs):
)


class RebuildCacheViewSet(viewsets.ViewSet):
    """DRF ViewSet exposing a POST endpoint that rebuilds the search cache.

    Registered (before this commit removed it) under ``v1/cache`` with
    url_path ``rebuild``, i.e. ``POST /v1/cache/rebuild``.
    """

    @action(detail=False, methods=["post"], url_path="rebuild")
    def rebuild_cache(self, request, *args, **kwargs):
        """Clear all indexed documents and rebuild both caches.

        Returns a 200 response with the rebuild duration in seconds on
        success, or a 500 response carrying the error message on failure.
        """
        # Imported lazily inside the handler, presumably to avoid an
        # import cycle with the search package — TODO confirm.
        from search.legislation import Legislation
        from search.public_gateway import PublicGateway

        tx_begin = time.time()  # wall-clock start, used only for the duration metric
        try:
            # Drop everything currently indexed, then repopulate from both
            # data sources using a shared, empty-query config.
            clear_all_documents()
            config = SearchDocumentConfig(search_query="", timeout=20)
            Legislation().build_cache(config)
            PublicGateway().build_cache(config)
        except Exception as e:
            # NOTE(review): the message says "error clearing documents" but
            # this handler also wraps the two build_cache calls, so the text
            # can be misleading about which step failed.
            return Response(
                data={"message": f"[urls] error clearing documents: {e}"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

        tx_end = time.time()
        urls_logger.info(
            f"time taken to rebuild cache: "
            f"{round(tx_end - tx_begin, 2)} seconds"
        )
        # Success: report the rebuild duration (seconds, 2 decimal places).
        return Response(
            data={
                "message": "rebuilt cache",
                "duration": round(tx_end - tx_begin, 2),
            },
            status=status.HTTP_200_OK,
        )


class PublishersViewSet(viewsets.ViewSet):
@action(detail=False, methods=["get"], url_path="publishers")
def publishers(self, request, *args, **kwargs):
Expand Down Expand Up @@ -151,7 +116,6 @@ def publishers(self, request, *args, **kwargs):
# Routers provide an easy way of automatically determining the URL conf.
router = routers.DefaultRouter()
router.register(r"v1", DataResponseViewSet, basename="search")
router.register(r"v1/cache", RebuildCacheViewSet, basename="rebuild")
router.register(r"v1/retrieve", PublishersViewSet, basename="publishers")

urlpatterns = [
Expand Down

0 comments on commit 07ac8e1

Please sign in to comment.