-
Notifications
You must be signed in to change notification settings - Fork 62
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[Core] [Logging] | Integration logs not being ingested #1120
Changes from 2 commits
836439d
6f19c92
6c3a232
6ce1114
ddf2d8b
0dad786
6419b8e
9683385
685261f
6553c58
52bd7f2
b450e5b
db55df7
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change | ||||
---|---|---|---|---|---|---|
|
@@ -3,7 +3,8 @@ | |||||
|
||||||
import httpx | ||||||
from loguru import logger | ||||||
|
||||||
import copy | ||||||
from traceback import format_exception | ||||||
from port_ocean.clients.port.authentication import PortAuthentication | ||||||
from port_ocean.clients.port.utils import handle_status_code | ||||||
from port_ocean.log.sensetive import sensitive_log_filter | ||||||
|
@@ -99,15 +100,24 @@ async def patch_integration( | |||||
handle_status_code(response) | ||||||
return response.json()["integration"] | ||||||
|
||||||
def sirealize_logs(self,logs: list[dict[str, Any]]): | ||||||
Tankilevitch marked this conversation as resolved.
Show resolved
Hide resolved
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This seems like a pretty isolated method, lets add a test for it |
||||||
_logs = copy.deepcopy(logs) | ||||||
for log in _logs: | ||||||
if 'extra' in log.keys() and 'exc_info' in log['extra'].keys() and isinstance(log['extra']['exc_info'],Exception): | ||||||
Tankilevitch marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
sirealized_exception = ''.join(format_exception(log['extra']['exc_info'])) | ||||||
Tankilevitch marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
log['extra']['exc_info'] = sirealized_exception | ||||||
return _logs | ||||||
Tankilevitch marked this conversation as resolved.
Show resolved
Hide resolved
|
||||||
|
||||||
async def ingest_integration_logs(self, logs: list[dict[str, Any]]) -> None:
    """Ship a batch of integration log records to Port's log ingest endpoint.

    Records are first passed through ``sirealize_logs`` so that any raw
    Exception objects under ``extra.exc_info`` become traceback strings
    the JSON encoder can handle (the original fix for logs silently
    failing to ingest).

    :param logs: log records to send.
    :raises: propagates HTTP errors via ``handle_status_code``.
    """
    logger.debug("Ingesting logs")
    serialized_logs = self.sirealize_logs(logs)
    log_attributes = await self.get_log_attributes()
    headers = await self.auth.headers()
    response = await self.client.post(
        log_attributes["ingestUrl"],
        headers=headers,
        json={
            # Only the serialized copy is posted — the raw records may
            # contain non-JSON-safe Exception objects.
            "logs": serialized_logs,
        },
    )
    # should_log=False: avoid recursively logging about log ingestion.
    handle_status_code(response, should_log=False)
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -37,6 +37,7 @@ def __init__( | |
self.flush_size = flush_size | ||
self.last_flush_time = time.time() | ||
self._serialized_buffer: list[dict[str, Any]] = [] | ||
self._thread_pool: list[threading.Thread] = [] | ||
|
||
@property | ||
def ocean(self) -> Ocean | None: | ||
|
@@ -60,6 +61,11 @@ def shouldFlush(self, record: logging.LogRecord) -> bool: | |
): | ||
return True | ||
return False | ||
|
||
def wait_for_lingering_threads(self) -> None:
    """Block until every flush worker thread in the pool has finished."""
    still_running = (worker for worker in self._thread_pool if worker.is_alive())
    for worker in still_running:
        worker.join()
|
||
def flush(self) -> None: | ||
if self.ocean is None or not self.buffer: | ||
|
@@ -70,15 +76,24 @@ def _wrap_event_loop(_ocean: Ocean, logs_to_send: list[dict[str, Any]]) -> None: | |
loop.run_until_complete(self.send_logs(_ocean, logs_to_send)) | ||
loop.close() | ||
|
||
def clear_thread_pool()->None: | ||
for thread in self._thread_pool: | ||
if not thread.is_alive(): | ||
self._thread_pool.remove(thread) | ||
|
||
self.acquire() | ||
logs = list(self._serialized_buffer) | ||
if logs: | ||
self.buffer.clear() | ||
self._serialized_buffer.clear() | ||
self.last_flush_time = time.time() | ||
threading.Thread(target=_wrap_event_loop, args=(self.ocean, logs)).start() | ||
clear_thread_pool() | ||
thread=threading.Thread(target=_wrap_event_loop, args=(self.ocean, logs)) | ||
thread.start() | ||
self._thread_pool.append(thread) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. are we supporting multiple threads flushing logs concurrently? |
||
self.release() | ||
|
||
|
||
async def send_logs( | ||
self, _ocean: Ocean, logs_to_send: list[dict[str, Any]] | ||
) -> None: | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
git grep -i sirealize