diff --git a/src/gcp_scanner/credsdb.py b/src/gcp_scanner/credsdb.py
index 78aebeb5..cf2c47c0 100644
--- a/src/gcp_scanner/credsdb.py
+++ b/src/gcp_scanner/credsdb.py
@@ -189,7 +189,7 @@ def find_creds(explicit_path: Optional[str] = None) -> List[str]:
       search_paths.append(full_path)
 
   for dir_path in search_paths:
-    logging.info(f"Scanning {dir_path} for credentials.db")
+    logging.info("Scanning %s for credentials.db", dir_path)
     full_path = os.path.join(dir_path, "credentials.db")
     if os.path.exists(full_path) and os.access(full_path, os.R_OK):
       print(f"Identified accessible gcloud config profile {full_path}")
diff --git a/src/gcp_scanner/scanner.py b/src/gcp_scanner/scanner.py
index e6a9b383..e0b76daf 100644
--- a/src/gcp_scanner/scanner.py
+++ b/src/gcp_scanner/scanner.py
@@ -328,7 +328,7 @@ def impersonate_service_accounts(
 
   # trying to impersonate SAs within project
   if impers is not None and impers.get('impersonate', False) is True:
-    logging.info(f'Looking for impersonation options in {project_id}')
+    logging.info('Looking for impersonation options in %s', project_id)
     iam_client = iam_client_for_credentials(credentials)
     if is_set(scan_config, 'iam_policy') is False:
       iam_policy = CrawlerFactory.create_crawler('iam_policy').crawl(
@@ -570,13 +570,14 @@ def main():
             (
               item
               for item in project_list
-              if item["projectId"] == force_project_id
+              if item['projectId'] == force_project_id
             ),
             None,
           )
           is not None
         ):
-          logging.info(f"The project {force_project_id} is already in the list")
+          logging.info('The project %s is already in the list',
+                       force_project_id)
          continue
         res = CrawlerFactory.create_crawler(
           'project_info',
@@ -624,7 +625,7 @@ def main():
 
   # See i#267 on why we use the native threading approach here.
   for i, project_obj in enumerate(project_queue):
-    logging.info('Finished %d projects out of %d' % (i, len(project_queue) - 1))
+    logging.info('Finished %d projects out of %d', i, len(project_queue) - 1)
     sync_t = threading.Thread(target=scanner.get_resources, args=(project_obj,))
     sync_t.daemon = True
     sync_t.start()
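
A minimal sketch (not part of the patch) of the rationale for the lazy %-style form used above: the logging module defers string interpolation until a handler actually emits the record, so filtered-out messages cost nothing. The dir_path value here is a hypothetical placeholder for illustration only.

    import logging

    # Only WARNING and above are emitted, so INFO records are filtered.
    logging.basicConfig(level=logging.WARNING)

    dir_path = "/home/user/.config/gcloud"  # hypothetical example value

    # f-string: the message is built eagerly even though the record is dropped.
    logging.info(f"Scanning {dir_path} for credentials.db")

    # Lazy form (what the patch switches to): the arguments are only
    # interpolated if the record is actually emitted.
    logging.info("Scanning %s for credentials.db", dir_path)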