update: logging levels from info to debug
Changed all logger.info statements to logger.debug for less verbose logging in non-critical areas. This reduces log clutter and focuses info-level logging on more significant events.
hareshkainthdbt committed Nov 25, 2024
1 parent 0c03707 commit 123f4e4
Showing 3 changed files with 19 additions and 18 deletions.
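
As a minimal sketch of the effect described above, assuming the standard library logging setup (the "orp_search" logger name is illustrative, not taken from the repository):

import logging

logging.basicConfig(level=logging.INFO)   # typical non-debug default
logger = logging.getLogger("orp_search")  # hypothetical logger name

logger.debug("paginating documents...")   # suppressed at INFO after this commit
logger.info("significant event")          # still emitted
logger.error("error clearing documents")  # error logging is unchanged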
12 changes: 6 additions & 6 deletions orp/orp_search/utils/documents.py
@@ -9,35 +9,34 @@


 def clear_all_documents():
-    logger.info("clearing all documents from table...")
+    logger.debug("clearing all documents from table...")
     try:
         DataResponseModel.objects.all().delete()
-        logger.info("documents cleared from table")
+        logger.debug("documents cleared from table")
     except Exception as e:
         logger.error(f"error clearing documents: {e}")
         throw_error(f"error clearing documents: {e}")


 def insert_or_update_document(document_json):
     try:
-        logger.info("creating document...")
+        logger.debug("creating document...")
         logger.debug(f"document: {document_json}")
         # Try to create a new document
         document = DataResponseModel(**document_json)
         document.full_clean()
         document.save()
     except Exception as e:
         logger.error(f"error creating document: {document_json}")
         logger.error(f"error: {e}")
-        logger.info("document already exists, updating...")
+        logger.debug("document already exists, updating...")

         # If a duplicate key error occurs, update the existing document
         try:
             document = DataResponseModel.objects.get(pk=document_json["id"])
             for key, value in document_json.items():
                 setattr(document, key, value)
             document.save()
-            logger.info(f"document updated: {document}")
+            logger.debug(f"document updated: {document}")
         except Exception as e:
             logger.error(f"error updating document: {document_json}")
             logger.error(f"error: {e}")
@@ -93,6 +92,7 @@ def generate_short_uuid():
         str: A URL-safe base64 encoded UUID truncated to 22 characters.
     """
     uid = uuid.uuid4()
+
     # Encode it to base64
     uid_b64 = base64.urlsafe_b64encode(uid.bytes).rstrip(b"=").decode("ascii")
     return uid_b64[
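
As an aside on the unchanged generate_short_uuid helper above, a self-contained sketch of the scheme its docstring describes: urlsafe base64 of a 16-byte UUID is 24 characters ending in "==" padding, so stripping the padding leaves exactly 22.

import base64
import uuid

uid = uuid.uuid4()  # 16 random bytes
uid_b64 = base64.urlsafe_b64encode(uid.bytes).rstrip(b"=").decode("ascii")
print(len(uid_b64))  # 22
print(uid_b64)       # e.g. "8-PcEEDzRzWFe0mkmfu2bQ"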
9 changes: 5 additions & 4 deletions orp/orp_search/utils/paginate.py
@@ -60,7 +60,7 @@ def paginate(
     """
     start_time = time.time()

-    logger.info("paginating documents...")
+    logger.debug("paginating documents...")
     paginator = Paginator(results, config.limit)
     try:
         paginated_documents = paginator.page(config.offset)
@@ -70,7 +70,7 @@
         paginated_documents = paginator.page(paginator.num_pages)

     end_time = time.time()
-    logger.info(
+    logger.debug(
         f"time taken to paginate (before description +/ regulatory topics):"
         f" {round(end_time - start_time, 2)} seconds"
     )
@@ -88,7 +88,7 @@
             ).split("\n")

     end_time = time.time()
-    logger.info(
+    logger.debug(
         f"time taken to paginate "
         f"(after description +/ regulatory topics): "
         f"{round(end_time - start_time, 2)} seconds"
@@ -121,7 +121,8 @@ def paginate(
     context["start_index"] = paginated_documents.start_index()
     context["end_index"] = paginated_documents.end_index()
     end_time = time.time()
-    logger.info(
+
+    logger.debug(
         f"time taken to paginate (after adding to context): "
         f"{round(end_time - start_time, 2)} seconds"
     )
16 changes: 8 additions & 8 deletions orp/orp_search/utils/search.py
@@ -72,11 +72,11 @@ def search_database(

     # Sanatize the query string
     query_str = sanitize_input(config.search_query)
-    logger.info(f"sanitized search query: {query_str}")
+    logger.debug(f"sanitized search query: {query_str}")

     # Generate query object
     query_objs = _create_search_query(query_str)
-    logger.info(f"search query objects: {query_objs}")
+    logger.debug(f"search query objects: {query_objs}")

     # Search across specific fields
     vector = SearchVector("title", "description", "regulatory_topics")
@@ -142,7 +142,7 @@ def search_database(


 def search(context: dict, request: HttpRequest) -> dict:
-    logger.info("received search request: %s", request)
+    logger.debug("received search request: %s", request)
     start_time = time.time()

     search_query = request.GET.get("query", "")
@@ -175,13 +175,13 @@ def search(context: dict, request: HttpRequest) -> dict:
     context = paginate(context, config, results)
     pag_end_time = time.time()

-    logger.info(
+    logger.debug(
         f"time taken to paginate (called from views.py): "
         f"{round(pag_end_time - pag_start_time, 2)} seconds"
     )

     end_time = time.time()
-    logger.info(
+    logger.debug(
         f"time taken to search and produce response: "
         f"{round(end_time - start_time, 2)} seconds"
     )
@@ -195,7 +195,7 @@ class Trim(Func):


 def get_publisher_names():
-    logger.info("getting publisher names...")
+    logger.debug("getting publisher names...")
     publishers_list = []

     try:
@@ -213,7 +213,7 @@ def get_publisher_names():

     except Exception as e:
         logger.error(f"error getting publisher names: {e}")
-        logger.info("returning empty list of publishers")
+        logger.debug("returning empty list of publishers")

-    logger.info(f"publishers found: {publishers_list}")
+    logger.debug(f"publishers found: {publishers_list}")
     return publishers_list
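
To opt back in to the demoted messages during local debugging, a Django LOGGING entry along these lines would work; the logger name and console handler are assumptions, not taken from this project's settings:

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {"console": {"class": "logging.StreamHandler"}},
    "loggers": {
        # assumed name; match whatever logging.getLogger(...) uses in orp_search
        "orp_search": {"handlers": ["console"], "level": "DEBUG"},
    },
}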
