Skip to content

Commit

Permalink
Small printing improvements and -p/-f logic check
Browse files Browse the repository at this point in the history
  • Loading branch information
mshudrak committed Jan 24, 2024
1 parent 11b53f3 commit f672277
Show file tree
Hide file tree
Showing 2 changed files with 16 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/gcp_scanner/credsdb.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ def find_creds(explicit_path: Optional[str] = None) -> List[str]:
search_paths.append(full_path)

for dir_path in search_paths:
-    print(f"Scanning {dir_path} for credentials.db")
+    logging.info(f"Scanning {dir_path} for credentials.db")
full_path = os.path.join(dir_path, "credentials.db")
if os.path.exists(full_path) and os.access(full_path, os.R_OK):
print(f"Identified accessible gcloud config profile {full_path}")
Expand Down
17 changes: 15 additions & 2 deletions src/gcp_scanner/scanner.py
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,6 @@ def impersonate_service_accounts(

# Enumerate projects accessible by SA
project_id = project['projectId']
-    print(f'Looking for impersonation options in {project_id}')
project_result = sa_results['projects'][project_id]
project_result['project_info'] = project
# Iterate over discovered service accounts by attempting impersonation
Expand All @@ -329,6 +328,7 @@ def impersonate_service_accounts(

# trying to impersonate SAs within project
if impers is not None and impers.get('impersonate', False) is True:
+        logging.info(f'Looking for impersonation options in {project_id}')
iam_client = iam_client_for_credentials(credentials)
if is_set(scan_config, 'iam_policy') is False:
iam_policy = CrawlerFactory.create_crawler('iam_policy').crawl(
Expand Down Expand Up @@ -565,6 +565,19 @@ def main():

if force_projects_list:
for force_project_id in force_projects_list:
+      if (
+          next(
+              (
+                  item
+                  for item in project_list
+                  if item["projectId"] == force_project_id
+              ),
+              None,
+          )
+          is not None
+      ):
+        logging.info(f"The project {force_project_id} is already in the list")
+        continue
res = CrawlerFactory.create_crawler(
'project_info',
).crawl(
Expand Down Expand Up @@ -611,7 +624,7 @@ def main():

# See i#267 on why we use the native threading approach here.
for i, project_obj in enumerate(project_queue):
-    print('Finished %d projects out of %d' % (i, len(project_queue) - 1))
+    logging.info('Finished %d projects out of %d' % (i, len(project_queue) - 1))
sync_t = threading.Thread(target=scanner.get_resources, args=(project_obj,))
sync_t.daemon = True
sync_t.start()
Expand Down

0 comments on commit f672277

Please sign in to comment.