diff --git a/tools/target-server-validator/README.md b/tools/target-server-validator/README.md
index 8b7b24b0..41078b12 100644
--- a/tools/target-server-validator/README.md
+++ b/tools/target-server-validator/README.md
@@ -85,6 +85,39 @@ Run the script as below
python3 main.py
```
+This script deploys an API proxy that validates whether the target servers are reachable. To call the API proxy directly, make sure your payload adheres to the following format:
+
+```json
+[
+ {
+ "host": "example.com",
+ "port": 443
+ },
+ {
+ "host": "example2.com",
+ "port": 443
+ },
+ // Add up to 8 more host-port combinations as needed
+]
+```
+
+The response will look like this:
+```json
+[
+ {
+ "host": "example.com",
+ "port": 443,
+ "status" : "REACHABLE"
+ },
+ {
+ "host": "example2.com",
+ "port": 443,
+ "status" : "UNKNOWN_HOST"
+ },
+ // and so on
+]
+```
+
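+For a quick smoke test of the deployed proxy, here is a minimal sketch using Python `requests` (the hostname placeholder stands for whatever you configured as `api_hostname`; adjust it to your environment):
+
+```python
+import requests
+
+payload = [
+ {"host": "example.com", "port": 443},
+ {"host": "example2.com", "port": 443},
+]
+
+# POST the host/port list to the validator proxy's base path
+resp = requests.post(
+ "https://<api_hostname>/validate-target-server",
+ json=payload,
+)
+print(resp.json())  # [{"host": ..., "port": ..., "status": ...}, ...]
+```
+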
## Report
Validation Report: `report.md` OR `report.csv` can be found in the same directory as the script.
diff --git a/tools/target-server-validator/apigee_utils.py b/tools/target-server-validator/apigee_utils.py
index eb8069d5..133cf5c8 100644
--- a/tools/target-server-validator/apigee_utils.py
+++ b/tools/target-server-validator/apigee_utils.py
@@ -20,6 +20,13 @@
import requests
import shutil
from time import sleep
+from utilities import ( # pylint: disable=import-error
+ run_validator_proxy,
+ unzip_file,
+ parse_proxy_hosts,
+ get_tes,
+)
+from base_logger import logger
class Apigee:
@@ -36,16 +43,16 @@ def __init__(
self.auth_type = auth_type
access_token = self.get_access_token()
self.auth_header = {
- "Authorization": "Bearer {}".format(access_token)
+ "Authorization": f"Bearer {access_token}"
if self.auth_type == "oauth"
- else "Basic {}".format(access_token) # noqa
+ else f"Basic {access_token}" # noqa
}
def is_token_valid(self, token):
url = f"https://www.googleapis.com/oauth2/v1/tokeninfo?access_token={token}" # noqa
response = requests.get(url)
if response.status_code == 200:
- print(f"Token Validated for user {response.json()['email']}")
+ logger.info(f"Token Validated for user {response.json()['email']}")
return True
return False
@@ -60,19 +67,15 @@ def get_access_token(self):
if self.is_token_valid(token):
return token
else:
- print(
- 'please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ' # noqa type: ignore
- )
+ logger.error('please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ') # noqa
sys.exit(1)
else:
return token
else:
if self.apigee_type == "x":
- print(
- 'please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ' # noqa
- )
+ logger.error('please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ') # noqa
else:
- print("please export APIGEE_OPDK_ACCESS_TOKEN")
+ logger.error('please export APIGEE_OPDK_ACCESS_TOKEN')
sys.exit(1)
def set_auth_header(self):
@@ -136,7 +139,7 @@ def create_api(self, api_name, proxy_bundle_path):
if response.status_code == 200:
revision = response.json().get('revision', "1")
return True, revision
- print(response.text)
+ logger.debug(response.text)
return False, None
def get_api_revisions_deployment(self, env, api_name, api_rev): # noqa
@@ -154,10 +157,10 @@ def get_api_revisions_deployment(self, env, api_name, api_rev): # noqa
if self.apigee_type == "opdk":
if api_deployment_status == "deployed":
return True
- print(f"API {api_name} is in Status: {api_deployment_status} !") # noqa
+ logger.debug(f"API {api_name} is in Status: {api_deployment_status} !") # noqa
return False
else:
- print(response.text)
+ logger.debug(response.text)
return False
def deploy_api(self, env, api_name, api_rev):
@@ -171,9 +174,9 @@ def deploy_api(self, env, api_name, api_rev):
else:
resp = response.json()
if "already deployed" in resp["error"]["message"]:
- print("Proxy {} is already Deployed".format(api_name))
+ logger.info(f"Proxy {api_name} is already Deployed")
return True
- print(response.text)
+ logger.debug(f"{response.text}")
return False
def deploy_api_bundle(self, env, api_name, proxy_bundle_path, api_force_redeploy=False): # noqa
@@ -185,50 +188,38 @@ def deploy_api_bundle(self, env, api_name, proxy_bundle_path, api_force_redeploy
if get_api_status:
api_exists = True
api_rev = api_revs[-1]
- print(
- f"Proxy with name {api_name} with revision {api_rev} already exists in Apigee Org {self.org}" # noqa
- )
+ logger.warning(f"Proxy with name {api_name} with revision {api_rev} already exists in Apigee Org {self.org}") # noqa
if api_force_redeploy:
api_exists = False
if not api_exists:
api_created, api_rev = self.create_api(api_name, proxy_bundle_path)
if api_created:
- print(
- f"Proxy has been imported with name {api_name} in Apigee Org {self.org}" # noqa
- )
+ logger.info(f"Proxy has been imported with name {api_name} in Apigee Org {self.org}") # noqa
api_exists = True
else:
- print(f"ERROR : Proxy {api_name} import failed !!! ")
+ logger.error(f"ERROR : Proxy {api_name} import failed !!! ")
return False
if api_exists:
if self.get_api_revisions_deployment(
env, api_name, api_rev
):
- print(f"INFO : Proxy {api_name} already active in to {env} in Apigee Org {self.org} !") # noqa
+ logger.info(f"Proxy {api_name} already active in to {env} in Apigee Org {self.org} !") # noqa
return True
else:
if self.deploy_api(env, api_name, api_rev):
- print(
- f"Proxy with name {api_name} has been deployed to {env} in Apigee Org {self.org}" # noqa
- )
+ logger.info(f"Proxy with name {api_name} has been deployed to {env} in Apigee Org {self.org}") # noqa
while api_deployment_retry_count < api_deployment_retry:
if self.get_api_revisions_deployment(
env, api_name, api_rev
):
- print(
- f"Proxy {api_name} active in runtime after {api_deployment_retry_count*api_deployment_sleep} seconds " # noqa
- )
+ logger.debug(f"Proxy {api_name} active in runtime after {api_deployment_retry_count*api_deployment_sleep} seconds ") # noqa
return True
else:
- print(
- f"Checking API deployment status in {api_deployment_sleep} seconds" # noqa
- )
+ logger.debug(f"Checking API deployment status in {api_deployment_sleep} seconds") # noqa
sleep(api_deployment_sleep)
api_deployment_retry_count += 1
else:
- print(
- f"ERROR : Proxy deployment to {env} in Apigee Org {self.org} Failed !!" # noqa
- )
+ logger.error(f"ERROR : Proxy deployment to {env} in Apigee Org {self.org} Failed !!") # noqa
return False
def get_api_vhost(self, vhost_name, env):
@@ -244,13 +235,11 @@ def get_api_vhost(self, vhost_name, env):
else:
hosts = response.json()["hostnames"]
if len(hosts) == 0:
- print(
- f"ERROR: Vhost/Env Group {vhost_name} contains no domains" # noqa
- )
+ logger.error(f"Vhost/Env Group {vhost_name} contains no domains") # noqa
return None
return hosts
else:
- print(f"ERROR: Vhost/Env Group {vhost_name} contains no domains") # noqa
+ logger.error(f"Vhost/Env Group {vhost_name} contains no domains")
return None
def list_apis(self, api_type):
@@ -289,6 +278,43 @@ def fetch_api_revision(self, api_type, api_name, revision, export_dir): # noqa
return True
return False
+ def fetch_api_proxy_ts_parallel(self, arg_tuple):
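+ # arg_tuple: (api_type, api_name, revision, export_dir)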
+ self.fetch_api_revision(arg_tuple[0], arg_tuple[1], arg_tuple[2], arg_tuple[3]) # noqa
+ unzip_file(
+ f"{arg_tuple[3]}/{arg_tuple[1]}.zip", # noqa
+ f"{arg_tuple[3]}/{arg_tuple[1]}", # noqa
+ )
+ parsed_proxy_hosts = parse_proxy_hosts(f"{arg_tuple[3]}/{arg_tuple[1]}/apiproxy") # noqa
+ proxy_tes = get_tes(parsed_proxy_hosts)
+ return arg_tuple[0], arg_tuple[1], parsed_proxy_hosts, proxy_tes
+
+ def fetch_env_target_servers_parallel(self, arg_tuple):
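+ # arg_tuple: (env, target_server_name)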
+ ts_info = self.get_target_server(arg_tuple[0], arg_tuple[1])
+ return arg_tuple[1], ts_info
+
+ def call_validator_proxy_parallel(self, arg_tuple):
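+ # arg_tuple: (url, dns_host, vhost_ip, batch_json, allow_insecure, proxy_targets)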
+ response = run_validator_proxy(arg_tuple[0], arg_tuple[1], arg_tuple[2], arg_tuple[3], arg_tuple[4]) # noqa
+ if isinstance(response, list):
+ report = []
+ for output in response:
+ report.append(
+ [
+ output["name"],
+ output["extracted_from"],
+ output["host"],
+ output["port"],
+ output["env"],
+ output["status"],
+ output["info"] if output.get("info") else " & ".join(
+ list(set(arg_tuple[5][output["name"]])))
+ if output["name"] in arg_tuple[5]
+ else "No References in any API",
+ ]
+ )
+ return report
+ else:
+ return {"error": f"Error while calling the validator proxy - {response.get('error','unknown error')} with payload {arg_tuple[3]}"} # noqa
+
def write_proxy_bundle(self, export_dir, file_name, data):
file_path = f"./{export_dir}/{file_name}.zip"
with open(file_path, "wb") as fl:
diff --git a/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml b/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml
index df4a122b..9a7f1d46 100644
--- a/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml
+++ b/tools/target-server-validator/apiproxy/policies/AM-Set-Json-Response.xml
@@ -13,11 +13,6 @@
-->
<AssignMessage async="false" continueOnError="false" enabled="true" name="AM-Set-Json-Response">
    <Set>
-        <Payload contentType="application/json">{
-            "host":"{request.header.host_name}",
-            "port": "{request.header.port_number}",
-            "status":"{flow.reachableStatus}"
-        }</Payload>
+        <Payload contentType="application/json">{flow.result}</Payload>
    </Set>
</AssignMessage>
\ No newline at end of file
diff --git a/tools/target-server-validator/apiproxy/policies/JSON-Threat-Protection.xml b/tools/target-server-validator/apiproxy/policies/JSON-Threat-Protection.xml
new file mode 100644
index 00000000..37454c5b
--- /dev/null
+++ b/tools/target-server-validator/apiproxy/policies/JSON-Threat-Protection.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<JSONThreatProtection async="false" continueOnError="false" enabled="true" name="JSON-Threat-Protection">
+    <DisplayName>JSON Threat Protection</DisplayName>
+    <ArrayElementCount>5</ArrayElementCount>
+</JSONThreatProtection>
\ No newline at end of file
diff --git a/tools/target-server-validator/apiproxy/proxies/default.xml b/tools/target-server-validator/apiproxy/proxies/default.xml
index 10d49b91..9a78d185 100644
--- a/tools/target-server-validator/apiproxy/proxies/default.xml
+++ b/tools/target-server-validator/apiproxy/proxies/default.xml
@@ -14,6 +14,9 @@
+            <Step>
+                <Name>JSON-Threat-Protection</Name>
+            </Step>
            <Step>
                <Name>JC-Port-Open-Check</Name>
            </Step>
diff --git a/tools/target-server-validator/base_logger.py b/tools/target-server-validator/base_logger.py
new file mode 100644
index 00000000..400299fe
--- /dev/null
+++ b/tools/target-server-validator/base_logger.py
@@ -0,0 +1,66 @@
+#!/usr/bin/python
+
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import logging
+
+EXEC_INFO = os.getenv("EXEC_INFO") == "True"
+LOG_HANDLER = os.getenv("LOG_HANDLER", "Stream")
+LOG_FILE_PATH = os.getenv("LOG_FILE_PATH", "app.log")
+LOGLEVEL = os.getenv('LOGLEVEL', 'INFO').upper()
+
+if LOG_HANDLER not in {"File", "Stream"}:
+ LOG_HANDLER = "Stream"
+
+if LOGLEVEL not in {"CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"}:
+ LOGLEVEL = "INFO"
+
+
+class CustomFormatter(logging.Formatter):
+
+ grey = "\x1b[38;20m"
+ yellow = "\x1b[33;20m"
+ red = "\x1b[31;20m"
+ bold_red = "\x1b[31;1m"
+ reset = "\x1b[0m"
+ logging_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)" # noqa
+
+ FORMATS = {
+ logging.DEBUG: grey + logging_format + reset,
+ logging.INFO: grey + logging_format + reset,
+ logging.WARNING: yellow + logging_format + reset,
+ logging.ERROR: red + logging_format + reset,
+ logging.CRITICAL: bold_red + logging_format + reset
+ }
+
+ def format(self, record):
+ log_fmt = self.FORMATS.get(record.levelno)
+ formatter = logging.Formatter(log_fmt)
+ return formatter.format(record)
+
+
+logger = logging.getLogger("TargetServerValidator")
+logger.setLevel(getattr(logging, LOGLEVEL))
+
+if LOG_HANDLER == "File":
+ ch = logging.FileHandler(LOG_FILE_PATH, mode="a")
+else:
+ ch = logging.StreamHandler()
+
+ch.setFormatter(CustomFormatter())
+
+logger.addHandler(ch)
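+
+# Usage: modules import the shared logger (e.g. `from base_logger import logger`)
+# and call logger.info(...) / logger.error(...); behaviour is controlled via the
+# LOGLEVEL, LOG_HANDLER ("Stream" or "File") and LOG_FILE_PATH env vars above.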
diff --git a/tools/target-server-validator/callout/pom.xml b/tools/target-server-validator/callout/pom.xml
index 90b66584..fb10d7d4 100644
--- a/tools/target-server-validator/callout/pom.xml
+++ b/tools/target-server-validator/callout/pom.xml
@@ -24,7 +24,7 @@
UTF-8
UTF-8
- 1.7
+ 1.8
../apiproxy/resources/java
6.8.7
1.7
@@ -70,7 +70,11 @@ do it manually by running these commands:
expressions
1.0.0
-
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.10.1</version>
+    </dependency>
@@ -134,18 +138,21 @@ do it manually by running these commands:
package
-
+
-
-
-
-
+
+
+
+
+
+
+
+
run
diff --git a/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java b/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java
index a31b6da8..a519a4d0 100644
--- a/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java
+++ b/tools/target-server-validator/callout/src/main/java/PortOpenCheck.java
@@ -25,7 +25,10 @@
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;
import com.apigee.flow.execution.Action;
-
+import com.google.gson.Gson;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonElement;
/**
* A callout that checks if a particular port is open on a specified host.
@@ -72,12 +75,26 @@ private static String available(final String host, final int port) {
public ExecutionResult execute(final MessageContext messageContext,
final ExecutionContext executionContext) {
try {
- String hostname = messageContext.getMessage().getHeader("host_name");
- String port = messageContext.getMessage().getHeader("port_number");
- int portnumber = Integer.parseInt(port);
- String status = available(hostname, portnumber);
- messageContext.setVariable("flow.reachableStatus", status);
- return ExecutionResult.SUCCESS;
+ String payload = (String) messageContext.getVariable("request.content");
+ if (payload != null) {
+ Gson gson = new Gson();
+ JsonArray hostPortArray = gson.fromJson(payload, JsonArray.class);
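+ // Probe each {host, port} entry and annotate it in place with its reachability status.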
+ for (JsonElement jsonElement : hostPortArray) {
+ JsonObject hostPortEntry = jsonElement.getAsJsonObject();
+ String host = hostPortEntry.get("host").getAsString();
+ Integer port = hostPortEntry.get("port").getAsInt();
+ String status = available(host, port);
+ hostPortEntry.addProperty("status", status);
+ }
+ String results = gson.toJson(hostPortArray);
+ messageContext.setVariable("flow.result", results);
+ return ExecutionResult.SUCCESS;
+ } else {
+ ExecutionResult executionResult = new ExecutionResult(false,
+ Action.ABORT);
+ executionResult.setErrorResponse("No payload received");
+ return executionResult;
+ }
} catch (Exception e) {
ExecutionResult executionResult = new ExecutionResult(false,
Action.ABORT);
@@ -87,7 +104,7 @@ public ExecutionResult execute(final MessageContext messageContext,
e.getClass().getName());
//--Set flow variables -- may be useful for debugging.
messageContext.setVariable("JAVA_ERROR", e.getMessage());
- return executionResult;
+ return executionResult;
}
}
}
diff --git a/tools/target-server-validator/main.py b/tools/target-server-validator/main.py
index ba94ee12..ccc6eb47 100644
--- a/tools/target-server-validator/main.py
+++ b/tools/target-server-validator/main.py
@@ -17,22 +17,21 @@
import os
import sys
+import json
from utilities import ( # pylint: disable=import-error
parse_config,
create_proxy_bundle,
- run_validator_proxy,
delete_file,
read_csv,
write_csv_report,
write_md_report,
create_dir,
- unzip_file,
- parse_proxy_hosts,
has_templating,
- get_tes,
get_row_host_port,
+ run_parallel,
)
from apigee_utils import Apigee # pylint: disable=import-error
+from base_logger import logger
def main():
@@ -46,29 +45,33 @@ def main():
report_format = "md"
# Intialize Source & Target Apigee
- SourceApigee = Apigee(
+ source_apigee = Apigee(
"x" if "apigee.googleapis.com" in cfg["source"]["baseurl"] else "opdk",
cfg["source"]["baseurl"],
cfg["source"]["auth_type"],
cfg["source"]["org"],
)
- TargetApigee = Apigee(
- "x" if "apigee.googleapis.com" in cfg["source"]["baseurl"] else "opdk",
+ target_apigee = Apigee(
+ "x" if "apigee.googleapis.com" in cfg["target"]["baseurl"] else "opdk",
cfg["target"]["baseurl"],
cfg["target"]["auth_type"],
cfg["target"]["org"],
)
- environments = SourceApigee.list_environments()
+ environments = source_apigee.list_environments()
all_target_servers = []
+
# Fetch Target Servers from Source Apigee
- print("INFO: exporting Target Servers !")
+ logger.info("exporting Target Servers !")
for each_env in environments:
- target_servers = SourceApigee.list_target_servers(each_env)
- for each_ts in target_servers:
- ts_info = SourceApigee.get_target_server(each_env, each_ts)
+ target_servers = source_apigee.list_target_servers(each_env)
+ args = ((each_env, each_ts) for each_ts in target_servers)
+ results = run_parallel(source_apigee.fetch_env_target_servers_parallel, args) # noqa
+ for result in results:
+ _, ts_info = result
ts_info["env"] = each_env
+ ts_info["extracted_from"] = "TargetServer"
all_target_servers.append(ts_info)
# Fetch Targets in APIs & Shared Flows from Source Apigee
@@ -78,9 +81,7 @@ def main():
skip_proxy_list = (
cfg["validation"].get("skip_proxy_list", "").split(",")
)
- print(
- "INFO: exporting proxies to be analyzed ! this may take a while !"
- )
+ logger.info("exporting proxies to be analyzed ! this may take a while !") # noqa
api_types = ["apis", "sharedflows"]
api_revision_map = {}
for each_api_type in api_types:
@@ -91,102 +92,75 @@ def main():
)
create_dir(proxy_export_dir + f"/{each_api_type}")
- for each_api in SourceApigee.list_apis(each_api_type):
+ for each_api in source_apigee.list_apis(each_api_type):
if each_api not in skip_proxy_list:
api_revision_map[each_api_type]["proxies"][
each_api
- ] = SourceApigee.list_api_revisions(each_api_type, each_api)[ # noqa
+ ] = source_apigee.list_api_revisions(each_api_type, each_api)[ # noqa
-1
]
else:
- print(f"INFO : Skipping API {each_api}")
- for each_api_type, each_api_type_data in api_revision_map.items():
- proxy_hosts[each_api_type] = {}
- for each_api, each_api_rev in each_api_type_data["proxies"].items(): # noqa
- print(
- f"Exporting API : {each_api} with revision : {each_api_rev} " # noqa
- )
- SourceApigee.fetch_api_revision(
- each_api_type,
- each_api,
- each_api_rev,
- api_revision_map[each_api_type]["export_dir"],
- )
- print(
- f"Unzipping API : {each_api} with revision : {each_api_rev} " # noqa
- )
- unzip_file(
- f"{api_revision_map[each_api_type]['export_dir']}/{each_api}.zip", # noqa
- f"{api_revision_map[each_api_type]['export_dir']}/{each_api}", # noqa
- )
- parsed_proxy_hosts = parse_proxy_hosts(
- f"{api_revision_map[each_api_type]['export_dir']}/{each_api}/apiproxy" # noqa
- )
+ logger.info(f"Skipping API {each_api}")
+
+ args = (
+ (
+ each_api_type,
+ each_api,
+ each_api_rev,
+ each_api_type_data["export_dir"]
+ )
+ for each_api_type, each_api_type_data in api_revision_map.items()
+ for each_api, each_api_rev in each_api_type_data["proxies"].items()
+ )
+ logger.debug("Exporting proxy target servers")
+ results = run_parallel(source_apigee.fetch_api_proxy_ts_parallel, args)
+
+ for result in results:
+ each_api_type, each_api, parsed_proxy_hosts, proxy_ts = result
+ if proxy_hosts.get(each_api_type):
proxy_hosts[each_api_type][each_api] = parsed_proxy_hosts
- proxy_tes = get_tes(parsed_proxy_hosts)
- for each_te in proxy_tes:
- if each_te in proxy_targets:
- proxy_targets[each_te].append(
- f"{each_api_type} - {each_api}"
- )
- else:
- proxy_targets[each_te] = [
- f"{each_api_type} - {each_api}"
- ]
- # Validate Targets against Target Apigee
+ else:
+ proxy_hosts[each_api_type] = {}
+ proxy_hosts[each_api_type][each_api] = parsed_proxy_hosts
+
+ for each_te in proxy_ts:
+ if each_te in proxy_targets:
+ proxy_targets[each_te].append(
+ f"{each_api_type} - {each_api}"
+ )
+ else:
+ proxy_targets[each_te] = [
+ f"{each_api_type} - {each_api}"
+ ]
+ logger.debug("Exporting proxy target servers done")
bundle_path = os.path.dirname(os.path.abspath(__file__))
# Create Validation Proxy Bundle
- print("INFO: Creating proxy bundle !")
+ logger.info("Creating proxy bundle !")
create_proxy_bundle(bundle_path, cfg["validation"]["api_name"], "apiproxy")
# Deploy Validation Proxy Bundle
- print("INFO: Deploying proxy bundle !")
- if not TargetApigee.deploy_api_bundle(
+ logger.info("Deploying proxy bundle !")
+ if not target_apigee.deploy_api_bundle(
cfg["validation"]["api_env"],
cfg["validation"]["api_name"],
f"{bundle_path}/{cfg['validation']['api_name']}.zip",
cfg["validation"].getboolean("api_force_redeploy", False)
):
- print(f"Proxy: {cfg['validation']['api_name']} deployment failed.")
+ logger.error(f"Proxy: {cfg['validation']['api_name']} deployment failed.") # noqa
sys.exit(1)
# CleanUp Validation Proxy Bundle
- print("INFO: Cleaning Up local proxy bundle !")
+ logger.info("Cleaning Up local proxy bundle !") # noqa
delete_file(f"{bundle_path}/{cfg['validation']['api_name']}.zip")
# Fetch API Northbound Endpoint
- print(
- f"INFO: Fetching VHost with name {cfg['validation']['api_hostname']} !" # noqa
- )
+ logger.info(f"Fetching VHost with name {cfg['validation']['api_hostname']} !") # noqa
vhost_domain_name = cfg["validation"]["api_hostname"]
vhost_ip = cfg["validation"].get("api_ip", "").strip()
api_url = f"https://{vhost_domain_name}/validate-target-server"
final_report = []
- _cached_hosts = {}
-
- # Run Target Server Validation
- print("INFO: Running validation against All Target Servers")
- for each_ts in all_target_servers:
- status = run_validator_proxy(
- api_url, vhost_domain_name, vhost_ip, each_ts["host"], each_ts["port"], allow_insecure # noqa
- )
- final_report.append(
- [
- each_ts["name"],
- "TargetServer",
- each_ts["host"],
- str(each_ts["port"]),
- each_ts["env"],
- status,
- " & ".join(list(set(proxy_targets[each_ts["name"]])))
- if each_ts["name"] in proxy_targets
- else "No References in any API",
- ]
- )
- # Run Validation on Targets configured in Proxies
- print("INFO: Running validation against All Targets discovered in Proxies")
for each_api_type, apis in proxy_hosts.items():
for each_api, each_targets in apis.items():
for each_target in each_targets:
@@ -194,76 +168,86 @@ def main():
not has_templating(each_target["host"])
and not each_target["target_server"]
):
- if (
- f"{each_target['host']}:{each_target['port']}" in _cached_hosts # noqa
- ):
- print(
- "INFO: Fetching validation status from cached hosts" # noqa
- )
- status = _cached_hosts[
- f"{each_target['host']}:{each_target['port']}" # noqa
- ]
+ each_target["env"] = "_ORG_API_"
+ if each_api_type == "apis":
+ each_target["extracted_from"] = "APIProxy"
else:
- status = run_validator_proxy(
- api_url,
- vhost_domain_name,
- vhost_ip,
- each_target["host"],
- each_target["port"],
- allow_insecure,
- )
- _cached_hosts[
- f"{each_target['host']}:{each_target['port']}"
- ] = status
- final_report.append(
- [
- each_api,
- "APIProxy"
- if each_api_type == "apis"
- else "SharedFlow",
- each_target["host"],
- str(each_target["port"]),
- "_ORG_API_",
- status,
- each_target["source"],
- ]
- )
+ each_target["extracted_from"] = "SharedFlow"
+ each_target["name"] = each_api
+ each_target["info"] = each_target["source"]
+ all_target_servers.append(each_target)
+
if cfg["validation"].getboolean("check_csv"):
csv_file = cfg["csv"]["file"]
default_port = cfg["csv"]["default_port"]
csv_rows = read_csv(csv_file)
for each_row in csv_rows:
each_host, each_port = get_row_host_port(each_row, default_port)
- if f"{each_host}:{each_port}" in _cached_hosts:
- print("INFO: Fetching validation status from cached hosts")
- status = _cached_hosts[f"{each_host}:{each_port}"]
- else:
- status = run_validator_proxy(
- api_url, vhost_domain_name, vhost_ip, each_host, each_port, allow_insecure # noqa
- )
- _cached_hosts[f"{each_host}:{each_port}"] = status
- final_report.append(
- [
- each_host,
- "Input CSV",
- each_host,
- each_port,
- "_NA_",
- status,
- "_NA_",
- ]
- )
+ ts_csv_info = {
+ "host": each_host,
+ "port": each_port,
+ "name": each_host,
+ "info": "_NA_",
+ "env": "_NA_",
+ "extracted_from": "Input CSV",
+ }
+ all_target_servers.append(ts_csv_info)
+
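+ # Validate targets in small batches; the validator proxy applies JSON threat
+ # protection to the request payload, which caps the array size per request.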
+ batch_size = 5
+ batches = []
+ new_structure = []
+
+ for entry in all_target_servers:
+ host = entry.get('host', '')
+ port = entry.get('port', '')
+
+ if host and port:
+ new_entry = {
+ 'host': host,
+ 'port': str(port),
+ 'name': entry.get('name', ''),
+ 'env': entry.get('env', ''),
+ 'extracted_from': entry.get('extracted_from', ''),
+ 'info': entry.get('info', '')
+ }
+
+ new_structure.append(new_entry)
+
+ if len(new_structure) == batch_size:
+ batches.append(new_structure)
+ new_structure = []
+
+ if new_structure:
+ batches.append(new_structure)
+
+ args = (
+ (
+ api_url,
+ vhost_domain_name,
+ vhost_ip,
+ json.dumps(batch),
+ allow_insecure,
+ proxy_targets
+ )
+ for batch in batches
+ )
+
+ output_reports = run_parallel(source_apigee.call_validator_proxy_parallel, args) # noqa
+ for output in output_reports:
+ if isinstance(output, list):
+ final_report.extend(output)
+ else:
+ logger.error(output.get("error", "Unknown error occurred while calling proxy")) # noqa
# Write CSV Report
# TODO: support relative report path
if report_format == "csv":
report_file = "report.csv"
- print(f"INFO: Dumping report to file {report_file}")
+ logger.info(f"Dumping report to file {report_file}")
write_csv_report(report_file, final_report)
if report_format == "md":
report_file = "report.md"
- print(f"INFO: Dumping report to file {report_file}")
+ logger.info(f"Dumping report to file {report_file}")
write_md_report(report_file, final_report)
diff --git a/tools/target-server-validator/pipeline.sh b/tools/target-server-validator/pipeline.sh
index 2e1fe792..3f65ba59 100755
--- a/tools/target-server-validator/pipeline.sh
+++ b/tools/target-server-validator/pipeline.sh
@@ -65,7 +65,11 @@ smtp.gmail.com,465
EOF
# Install Dependencies
-python3 -m pip install -r "$SCRIPTPATH/requirements.txt"
+VENV_PATH="$SCRIPTPATH/venv"
+python3 -m venv "$VENV_PATH"
+# shellcheck source=/dev/null
+. "$VENV_PATH/bin/activate"
+pip install -r "$SCRIPTPATH/requirements.txt"
# Generate Gcloud Acccess Token
APIGEE_ACCESS_TOKEN="$(gcloud config config-helper --force-auth-refresh --format json | jq -r '.credential.access_token')"
@@ -78,3 +82,7 @@ python3 main.py
# Display Report
cat "$SCRIPTPATH/report.md"
+
+# deactivate venv & cleanup
+deactivate
+rm -rf "$VENV_PATH"
\ No newline at end of file
diff --git a/tools/target-server-validator/utilities.py b/tools/target-server-validator/utilities.py
index c96feb16..f0bf56bc 100644
--- a/tools/target-server-validator/utilities.py
+++ b/tools/target-server-validator/utilities.py
@@ -25,6 +25,8 @@
import xmltodict
import urllib3
from forcediphttpsadapter.adapters import ForcedIPHTTPSAdapter
+import concurrent.futures
+from base_logger import logger
def parse_config(config_file):
@@ -53,31 +55,34 @@ def create_proxy_bundle(proxy_bundle_directory, api_name, target_dir): # noqa
def run_validator_proxy(
- url, dns_host, vhost_ip, target_host, target_port="443", allow_insecure=False): # noqa
+ url, dns_host, vhost_ip, batch, allow_insecure=False): # noqa
headers = {
- "host_name": target_host,
- "port_number": str(target_port),
"Host": dns_host,
+ "Content-Type": "application/json"
}
if allow_insecure:
- print("INFO: Skipping Certificate Verification & disabling warnings because 'allow_insecure' is set to true") # noqa
+ logger.info("Skipping Certificate Verification & disabling warnings because 'allow_insecure' is set to true") # noqa
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
session = requests.Session()
if len(vhost_ip) > 0:
session.mount(
f"https://{dns_host}", ForcedIPHTTPSAdapter(dest_ip=vhost_ip)
) # noqa
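+ # Pin HTTPS connections for dns_host to the supplied IP (useful when the
+ # vhost hostname does not resolve via public DNS).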
- r = session.get(url, headers=headers, verify=(not allow_insecure))
- if r.status_code == 200:
- return r.json()["status"]
- return "STATUS_UNKNOWN"
+ try:
+ response = session.post(url, data=batch, verify=(not allow_insecure), headers=headers) # noqa
+ if response.status_code == 200:
+ return response.json()
+ else:
+ return {"error": f"{response.json().get('error','')}"} # noqa
+ except Exception as e:
+ return {"error": f"{e}"}
def delete_file(file_name):
try:
os.remove(file_name)
except FileNotFoundError:
- print(f"File {file_name} doesnt exist")
+ logger.warning(f"File {file_name} doesnt exist")
def write_csv_report(
@@ -99,8 +104,10 @@ def read_csv(file_name):
rows = csv.reader(file)
for each_row in rows:
read_rows.append(each_row)
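+ # Skip the header row, if the file is not empty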
+ if len(read_rows) != 0:
+ read_rows.pop(0)
except FileNotFoundError:
- print(f"WARN: {file_name} not found ! ")
+ logger.warning(f"File {file_name} not found ! ")
return read_rows
@@ -128,7 +135,7 @@ def create_dir(dir):
try:
os.makedirs(dir)
except FileExistsError:
- print(f"INFO: {dir} already exists")
+ logger.info(f"{dir} already exists")
def list_dir(dir, soft=False):
@@ -137,7 +144,7 @@ def list_dir(dir, soft=False):
except FileNotFoundError:
if soft:
return []
- print(f'ERROR: Directory "{dir}" not found')
+ logger.error(f"Directory '{dir}' not found")
sys.exit(1)
@@ -152,7 +159,7 @@ def parse_xml(file):
doc = xmltodict.parse(fl.read())
return doc
except FileNotFoundError:
- print(f'ERROR: File "{file}" not found')
+ logger.error(f"File '{file}' not found")
return {}
@@ -236,9 +243,28 @@ def get_tes(data):
def get_row_host_port(row, default_port=443):
host, port = None, None
if len(row) == 0:
- print("WARN: Input row has no host ")
+ logger.warning("Input row has no host.")
if len(row) == 1:
host, port = row[0], default_port
if len(row) > 1:
host, port = row[0], row[1]
return host, port
+
+
+def run_parallel(func, args, workers=10):
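+ # func and each arg must be picklable, since a process pool is used;
+ # results are gathered in completion order, not submission order.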
+ with concurrent.futures.ProcessPoolExecutor(max_workers=workers) as executor: # noqa
+ future_list = {executor.submit(func, arg) for arg in args}
+
+ data = []
+ for future in concurrent.futures.as_completed(future_list):
+ try:
+ data.append(future.result())
+ except Exception as e:
+ # Log the failure and keep going; one failed worker should not abort the run.
+ logger.error(f"{future} generated an exception: {e}")
+ return data