diff --git a/example_config b/example_config index 0bc35b7a..2bfe6748 100644 --- a/example_config +++ b/example_config @@ -7,6 +7,9 @@ "fetch": true, "comment": "Fetch metadata about GCE disks" }, + "compute_security_policies": { + "fetch": true + }, "compute_images": { "fetch": true }, diff --git a/src/gcp_scanner/crawler/compute_security_policies_crawler.py b/src/gcp_scanner/crawler/compute_security_policies_crawler.py new file mode 100644 index 00000000..2eaa8da5 --- /dev/null +++ b/src/gcp_scanner/crawler/compute_security_policies_crawler.py @@ -0,0 +1,51 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import logging +import sys +from typing import List, Dict, Any, Union + +from googleapiclient import discovery + +from gcp_scanner.crawler.interface_crawler import ICrawler + + +class ComputeSecurityPoliciesCrawler(ICrawler): + """Handle crawling of compute security policies data.""" + + def crawl(self, project_name: str, service: discovery.Resource, + config: Dict[str, Union[bool, str]] = None) -> List[Dict[str, Any]]: + """Retrieve a list of Compute security policies available in the project. + + Args: + project_name: The name of the project to query information about. + service: A resource object for interacting with the GCP API. + config: Configuration options for the crawler (Optional). + + Returns: + A list of resource objects representing the crawled data. 
+ """ + logging.info("Retrieving list of Compute Security Policies") + security_policies_list = list() + try: + request = service.securityPolicies().list(project=project_name) + while request is not None: + response = request.execute() + security_policies_list.extend(response.get("items", [])) + request = service.securityPolicies().list_next( + previous_request=request, previous_response=response) + except Exception: + logging.info("Failed to enumerate compute security policies in the %s", project_name) + logging.info(sys.exc_info()) + return security_policies_list + \ No newline at end of file diff --git a/src/gcp_scanner/crawler/crawler_factory.py b/src/gcp_scanner/crawler/crawler_factory.py index e8e97478..e31a4089 100644 --- a/src/gcp_scanner/crawler/crawler_factory.py +++ b/src/gcp_scanner/crawler/crawler_factory.py @@ -25,6 +25,7 @@ from gcp_scanner.crawler.compute_firewall_rules_crawler import ComputeFirewallRulesCrawler from gcp_scanner.crawler.compute_images_crawler import ComputeImagesCrawler from gcp_scanner.crawler.compute_instances_crawler import ComputeInstancesCrawler +from gcp_scanner.crawler.compute_security_policies_crawler import ComputeSecurityPoliciesCrawler from gcp_scanner.crawler.compute_snapshots_crawler import ComputeSnapshotsCrawler from gcp_scanner.crawler.compute_static_ips_crawler import ComputeStaticIPsCrawler from gcp_scanner.crawler.compute_subnets_crawler import ComputeSubnetsCrawler @@ -53,6 +54,7 @@ "compute_disks": ComputeDisksCrawler, "compute_images": ComputeImagesCrawler, "compute_instances": ComputeInstancesCrawler, + "compute_security_policies": ComputeSecurityPoliciesCrawler, "compute_snapshots": ComputeSnapshotsCrawler, "datastore_kinds": DatastoreCrawler, "dns_policies": DNSPoliciesCrawler, diff --git a/src/gcp_scanner/scanner.py b/src/gcp_scanner/scanner.py index 7db10319..878077a7 100644 --- a/src/gcp_scanner/scanner.py +++ b/src/gcp_scanner/scanner.py @@ -98,6 +98,7 @@ 'compute_disks': 'compute', 'compute_images': 
'compute', 'compute_instances': 'compute', + 'compute_security_policies': 'compute', 'compute_snapshots': 'compute', 'datastore_kinds': 'datastore', 'dns_policies': 'dns', diff --git a/src/gcp_scanner/test_acceptance.py b/src/gcp_scanner/test_acceptance.py index 8fd07b56..8ec40f36 100644 --- a/src/gcp_scanner/test_acceptance.py +++ b/src/gcp_scanner/test_acceptance.py @@ -23,7 +23,7 @@ from . import scanner -RESOURCE_COUNT = 30 +RESOURCE_COUNT = 31 RESULTS_JSON_COUNT = 1 PROJECT_INFO_COUNT = 5 IAM_POLICY_COUNT = 12 @@ -48,7 +48,7 @@ CLOUD_FUNCTIONS = 1 ENDPOINTS_COUNT = 0 KMS_COUNT = 1 -SERVICES_COUNT = 40 +SERVICES_COUNT = 42 SERVICE_ACCOUNTS_COUNT = 2 diff --git a/src/gcp_scanner/test_unit.py b/src/gcp_scanner/test_unit.py index f2401419..53069bb3 100644 --- a/src/gcp_scanner/test_unit.py +++ b/src/gcp_scanner/test_unit.py @@ -66,6 +66,7 @@ from .crawler.compute_firewall_rules_crawler import ComputeFirewallRulesCrawler from .crawler.compute_images_crawler import ComputeImagesCrawler from .crawler.compute_instances_crawler import ComputeInstancesCrawler +from .crawler.compute_security_policies_crawler import ComputeSecurityPoliciesCrawler from .crawler.compute_snapshots_crawler import ComputeSnapshotsCrawler from .crawler.compute_static_ips_crawler import ComputeStaticIPsCrawler from .crawler.compute_subnets_crawler import ComputeSubnetsCrawler @@ -391,6 +392,21 @@ def test_compute_instance_name(self): ) ) + def test_compute_security_policies(self): + """Test compute security policies.""" + self.assertTrue( + verify( + CrawlerFactory.create_crawler( + "compute_security_policies", + ).crawl( + PROJECT_NAME, + ClientFactory.get_client("compute").get_service(self.credentials), + ), + "compute_security_policies", + True, + ) + ) + def test_compute_disks_names(self): """Test compute disk names.""" self.assertTrue( verify( @@ -994,6 +1010,11 @@ def test_create_crawler_compute_instances(self): crawler = CrawlerFactory.create_crawler("compute_instances") 
self.assertIsInstance(crawler, ComputeInstancesCrawler) + def test_create_crawler_compute_security_policies(self): + """Test create_crawler method with 'compute_security_policies' name.""" + crawler = CrawlerFactory.create_crawler("compute_security_policies") + self.assertIsInstance(crawler, ComputeSecurityPoliciesCrawler) + def test_create_crawler_compute_images(self): """Test create_crawler method with 'compute_images' name.""" crawler = CrawlerFactory.create_crawler("compute_images") diff --git a/test/bootstrap/compute_security_policies.sh b/test/bootstrap/compute_security_policies.sh new file mode 100644 index 00000000..57488b5a --- /dev/null +++ b/test/bootstrap/compute_security_policies.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# gcloud services enable compute.googleapis.com + +# create security policy named "test-security-policy" +gcloud compute security-policies create test-security-policy \ + --description "policy for external users" + +# update default rule to deny all traffic (default rule has priority 2147483647) +gcloud compute security-policies rules update 2147483647 \ + --security-policy test-security-policy \ + --action "deny-404" + \ No newline at end of file diff --git a/test/compute_security_policies b/test/compute_security_policies new file mode 100644 index 00000000..34f35ba7 --- /dev/null +++ b/test/compute_security_policies @@ -0,0 +1,30 @@ +[ + { +CHECK "kind": "compute#securityPolicy", + "id": "1127822762545978383", + "creationTimestamp": "2023-12-26T01:54:40.411-08:00", +CHECK "name": "test-security-policy", + "description": "policy for external users", + "rules": [ + { + "kind": "compute#securityPolicyRule", + "description": "default rule", + "priority": 2147483647, + "match": { + "versionedExpr": "SRC_IPS_V1", + "config": { +CHECK "srcIpRanges": [ + "*" + ] + } + }, +CHECK "action": "deny(404)", + "preview": false + } + ], + "fingerprint": "Hl4XzYCKyRI=", + "selfLink": 
"https://www.googleapis.com/compute/v1/projects/circular-fusion-406401/global/securityPolicies/test-security-policy", + "type": "CLOUD_ARMOR", + "labelFingerprint": "42WmSpB8rSM=" + } +]