Skip to content

Commit

Permalink
Fixes lint errors
Browse files Browse the repository at this point in the history
  • Loading branch information
shariff-6 committed Oct 29, 2024
1 parent 95fd038 commit 016af45
Show file tree
Hide file tree
Showing 3 changed files with 37 additions and 24 deletions.
25 changes: 17 additions & 8 deletions integrations/pagerduty/clients/pagerduty.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

MAX_CONCURRENT_REQUESTS = 1
SAFE_MINIMUM_FOR_RATE_LIMITS = 3
MAX_RETRY_COUNT = 5
MAX_RETRY_COUNT = 5


class PagerDutyClient:
Expand Down Expand Up @@ -76,6 +76,7 @@ def api_auth_param(self) -> dict[str, Any]:
"Content-Type": "application/json",
}
}

async def _handle_rate_limiting(self, response: httpx.Response) -> None:
    """Apply rate-limit backoff for a PagerDuty API response.

    Delegates to the shared rate limiter only when the response status is
    429 (Too Many Requests); any other status is a no-op here.
    NOTE(review): viewed through a diff hunk — the method may continue
    below the visible lines; confirm against the full file.
    """
    if response.status_code == 429:
        await self._rate_limiter.handle_rate_limiting(response)
Expand Down Expand Up @@ -241,7 +242,9 @@ async def get_incident_analytics(self, incident_id: str) -> dict[str, Any]:
logger.error(f"HTTP occurred while fetching incident analytics data: {e}")
return {}

async def get_service_analytics(self, service_id: str, months_period: int = 6) -> Dict[str, Any]:
async def get_service_analytics(
self, service_id: str, months_period: int = 6
) -> Dict[str, Any]:
logger.info(f"Fetching analytics for service: {service_id}")
url = f"{self.api_url}/analytics/metrics/incidents/services"
date_ranges = get_date_range_for_last_n_months(months_period)
Expand All @@ -260,20 +263,26 @@ async def get_service_analytics(self, service_id: str, months_period: int = 6) -
body,
)

async def _handle_service_analytics_request(self, url: str, body: Dict[str, Any]) -> Dict[str, Any]:
async def _handle_service_analytics_request(
self, url: str, body: Dict[str, Any]
) -> Dict[str, Any]:
async with self._semaphore:
response = await self.http_client.post(url, json=body)

response.raise_for_status()
response.raise_for_status()

data = response.json().get("data", [])
if not data:
logger.info(f"No analytics data found for service: {body['filters']['service_ids'][0]}")
logger.info(
f"No analytics data found for service: {body['filters']['service_ids'][0]}"
)
return {}

logger.info(f"Successfully fetched analytics for service: {body['filters']['service_ids'][0]}")
return data[0]

logger.info(
f"Successfully fetched analytics for service: {body['filters']['service_ids'][0]}"
)
return data[0]

async def fetch_and_cache_users(self) -> None:
async for users in self.paginate_request_to_pager_duty(data_key=USER_KEY):
for user in users:
Expand Down
15 changes: 10 additions & 5 deletions integrations/pagerduty/helpers/rate_limiter.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,18 @@ async def call_with_rate_limiting(self, func: Callable, *args, **kwargs) -> Any:
retry_count += 1
continue
else:
logger.error(f"Client error {e.response.status_code} for URL: {e.request.url} - {e.response.text}")
logger.error(
f"Client error {e.response.status_code} for URL: {e.request.url} - {e.response.text}"
)
return {}

async def handle_rate_limiting(self, response: httpx.Response) -> None:
requests_remaining = int(response.headers.get("ratelimit-remaining", 0))
reset_time = int(response.headers.get("ratelimit-reset", 0))
logger.info(f"Remaining {requests_remaining} requests, reset time {reset_time} seconds")
logger.warning(f"Low request limit. Waiting for {reset_time} seconds before next call.")
logger.info(
f"Remaining {requests_remaining} requests, reset time {reset_time} seconds"
)
logger.warning(
f"Low request limit. Waiting for {reset_time} seconds before next call."
)
await asyncio.sleep(reset_time)

21 changes: 10 additions & 11 deletions integrations/pagerduty/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,24 +32,24 @@ async def enrich_service_with_analytics_data(
client: PagerDutyClient, services: list[dict[str, Any]], months_period: int
) -> list[dict[str, Any]]:
enriched_services = []

for i in range(0, len(services), ANALYTICS_BATCH_SIZE):
batch = services[i:i + ANALYTICS_BATCH_SIZE]
batch = services[i : i + ANALYTICS_BATCH_SIZE]
analytics_data = await asyncio.gather(
*[
client.get_service_analytics(service["id"], months_period)
for service in batch
]
)

enriched_services.extend([
{**service, "__analytics": analytics}
for service, analytics in zip(batch,
analytics_data)
])

return enriched_services

enriched_services.extend(
[
{**service, "__analytics": analytics}
for service, analytics in zip(batch, analytics_data)
]
)

return enriched_services


async def enrich_incidents_with_analytics_data(
Expand Down Expand Up @@ -118,7 +118,6 @@ async def on_services_resync(kind: str) -> ASYNC_GENERATOR_RESYNC_TYPE:
pager_duty_client, services, selector.analytics_months_period
)


yield await pager_duty_client.update_oncall_users(services)


Expand Down

0 comments on commit 016af45

Please sign in to comment.