feat: Implement Compute Security Policies Crawler (#308)
* Implement compute security policies crawler. Add unit tests and a bootstrap script. Add a compute security policies entry to the example config
---------

Co-authored-by: mshudrak <[email protected]>
shravankshenoy and mshudrak authored Dec 27, 2023
1 parent 10f608b commit e79b3f8
Showing 8 changed files with 123 additions and 2 deletions.
3 changes: 3 additions & 0 deletions example_config
@@ -7,6 +7,9 @@
"fetch": true,
"comment": "Fetch metadata about GCE disks"
},
"compute_security_policies": {
"fetch": true
},
"compute_images": {
"fetch": true
},
51 changes: 51 additions & 0 deletions src/gcp_scanner/crawler/compute_security_policies_crawler.py
@@ -0,0 +1,51 @@
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
from typing import Any, Dict, List, Optional, Union

from googleapiclient import discovery

from gcp_scanner.crawler.interface_crawler import ICrawler


class ComputeSecurityPoliciesCrawler(ICrawler):
"""Handle crawling of compute security policies data."""

  def crawl(self, project_name: str, service: discovery.Resource,
            config: Optional[Dict[str, Union[bool, str]]] = None) -> List[Dict[str, Any]]:
    """Retrieve a list of Compute security policies available in the project.

    Args:
      project_name: The name of the project to query information about.
      service: A resource object for interacting with the GCP API.
      config: Optional configuration options for the crawler.

    Returns:
      A list of resource objects representing the crawled data.
    """
logging.info("Retrieving list of Compute Security Policies")
security_policies_list = list()
try:
request = service.securityPolicies().list(project=project_name)
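      # googleapiclient pagination: list_next() builds the request for the
      # next page and returns None once the final page has been consumed.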
while request is not None:
response = request.execute()
security_policies_list.extend(response.get("items", []))
request = service.securityPolicies().list_next(
previous_request=request, previous_response=response)
except Exception:
logging.info("Failed to enumerate compute security policies in the %s", project_name)
logging.info(sys.exc_info())
return security_policies_list
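
A minimal standalone usage sketch of the new crawler (the project ID "my-project" is hypothetical; the factory call mirrors the unit tests below, and in the scanner itself the compute service comes from ClientFactory rather than discovery.build directly):

from googleapiclient import discovery

from gcp_scanner.crawler.crawler_factory import CrawlerFactory

# discovery.build falls back to application-default credentials when no
# credentials object is passed explicitly.
service = discovery.build("compute", "v1")
crawler = CrawlerFactory.create_crawler("compute_security_policies")
policies = crawler.crawl("my-project", service)
for policy in policies:
    print(policy["name"], policy.get("type"))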

2 changes: 2 additions & 0 deletions src/gcp_scanner/crawler/crawler_factory.py
@@ -25,6 +25,7 @@
from gcp_scanner.crawler.compute_firewall_rules_crawler import ComputeFirewallRulesCrawler
from gcp_scanner.crawler.compute_images_crawler import ComputeImagesCrawler
from gcp_scanner.crawler.compute_instances_crawler import ComputeInstancesCrawler
from gcp_scanner.crawler.compute_security_policies_crawler import ComputeSecurityPoliciesCrawler
from gcp_scanner.crawler.compute_snapshots_crawler import ComputeSnapshotsCrawler
from gcp_scanner.crawler.compute_static_ips_crawler import ComputeStaticIPsCrawler
from gcp_scanner.crawler.compute_subnets_crawler import ComputeSubnetsCrawler
@@ -53,6 +54,7 @@
"compute_disks": ComputeDisksCrawler,
"compute_images": ComputeImagesCrawler,
"compute_instances": ComputeInstancesCrawler,
"compute_security_policies": ComputeSecurityPoliciesCrawler,
"compute_snapshots": ComputeSnapshotsCrawler,
"datastore_kinds": DatastoreCrawler,
"dns_policies": DNSPoliciesCrawler,
1 change: 1 addition & 0 deletions src/gcp_scanner/scanner.py
@@ -98,6 +98,7 @@
'compute_disks': 'compute',
'compute_images': 'compute',
'compute_instances': 'compute',
'compute_security_policies': 'compute',
'compute_snapshots': 'compute',
'datastore_kinds': 'datastore',
'dns_policies': 'dns',
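(This map presumably ties each crawler name to the name of the API client it needs, so the new crawler reuses the existing compute client.)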
4 changes: 2 additions & 2 deletions src/gcp_scanner/test_acceptance.py
@@ -23,7 +23,7 @@

from . import scanner

-RESOURCE_COUNT = 30
+RESOURCE_COUNT = 31
RESULTS_JSON_COUNT = 1
PROJECT_INFO_COUNT = 5
IAM_POLICY_COUNT = 12
@@ -48,7 +48,7 @@
CLOUD_FUNCTIONS = 1
ENDPOINTS_COUNT = 0
KMS_COUNT = 1
-SERVICES_COUNT = 40
+SERVICES_COUNT = 42
SERVICE_ACCOUNTS_COUNT = 2


21 changes: 21 additions & 0 deletions src/gcp_scanner/test_unit.py
@@ -66,6 +66,7 @@
from .crawler.compute_firewall_rules_crawler import ComputeFirewallRulesCrawler
from .crawler.compute_images_crawler import ComputeImagesCrawler
from .crawler.compute_instances_crawler import ComputeInstancesCrawler
from .crawler.compute_security_policies_crawler import ComputeSecurityPoliciesCrawler
from .crawler.compute_snapshots_crawler import ComputeSnapshotsCrawler
from .crawler.compute_static_ips_crawler import ComputeStaticIPsCrawler
from .crawler.compute_subnets_crawler import ComputeSubnetsCrawler
@@ -391,6 +392,21 @@ def test_compute_instance_name(self):
)
)

def test_compute_security_policies(self):
"""Test compute security policies"""
self.assertTrue(
verify(
CrawlerFactory.create_crawler(
"compute_security_policies",
).crawl(
PROJECT_NAME,
ClientFactory.get_client("compute").get_service(self.credentials),
),
"compute_security_policies",
True,
)
)

def test_compute_disks_names(self):
"""Test compute disk names."""
self.assertTrue(
@@ -994,6 +1010,11 @@ def test_create_crawler_compute_instances(self):
crawler = CrawlerFactory.create_crawler("compute_instances")
self.assertIsInstance(crawler, ComputeInstancesCrawler)

def test_create_crawler_compute_security_policies(self):
"""Test create_crawler method with 'compute_security_policies' name."""
crawler = CrawlerFactory.create_crawler("compute_security_policies")
self.assertIsInstance(crawler, ComputeSecurityPoliciesCrawler)

def test_create_crawler_compute_images(self):
"""Test create_crawler method with 'compute_images' name."""
crawler = CrawlerFactory.create_crawler("compute_images")
13 changes: 13 additions & 0 deletions test/bootstrap/compute_security_policies.sh
@@ -0,0 +1,13 @@
#!/bin/bash

# gcloud services enable compute.googleapis.com

# create security policy named "test-security-policy"
gcloud compute security-policies create test-security-policy \
--description "policy for external users"

# update default rule to deny all traffic (default rule has priority 2147483647)
gcloud compute security-policies rules update 2147483647 \
--security-policy test-security-policy \
--action "deny-404"
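
For local cleanup after the acceptance run, gcloud compute security-policies delete test-security-policy --quiet removes the policy again (a teardown step, not part of this commit).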

30 changes: 30 additions & 0 deletions test/compute_security_policies
@@ -0,0 +1,30 @@
[
{
CHECK "kind": "compute#securityPolicy",
"id": "1127822762545978383",
"creationTimestamp": "2023-12-26T01:54:40.411-08:00",
CHECK "name": "test-security-policy",
"description": "policy for external users",
"rules": [
{
"kind": "compute#securityPolicyRule",
"description": "default rule",
"priority": 2147483647,
"match": {
"versionedExpr": "SRC_IPS_V1",
"config": {
CHECK "srcIpRanges": [
"*"
]
}
},
CHECK "action": "deny(404)",
"preview": false
}
],
"fingerprint": "Hl4XzYCKyRI=",
"selfLink": "https://www.googleapis.com/compute/v1/projects/circular-fusion-406401/global/securityPolicies/test-security-policy",
"type": "CLOUD_ARMOR",
"labelFingerprint": "42WmSpB8rSM="
}
]
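
For reference, the fixture above matches the shape of the JSON array that gcloud compute security-policies list --format=json returns for the bootstrapped project, which is presumably how it was captured.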
