Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Enhanced Target Server Validator #715

Merged
merged 10 commits into from
Jan 24, 2024
33 changes: 33 additions & 0 deletions tools/target-server-validator/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,39 @@ Run the script as below
python3 main.py
```

This script deploys an API proxy to validate if the target servers are reachable or not. To use the API proxy, make sure your payloads adhere to the following format:

```json
[
{
"host": "example.com",
"port": 443
},
{
"host": "example2.com",
"port": 443
},
  // Add up to 3 more host-port combinations as needed (the proxy's JSON
  // threat protection policy allows at most 5 entries per request)
]
```

The response will look like this:
```json
[
{
"host": "example.com",
"port": 443,
"status" : "REACHABLE"
},
{
"host": "example2.com",
"port": 443,
"status" : "UNKNOWN_HOST"
},
// and so on
]
```

## Report
Validation Report: `report.md` OR `report.csv` can be found in the same directory as the script.

Expand Down
104 changes: 65 additions & 39 deletions tools/target-server-validator/apigee_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,13 @@
import requests
import shutil
from time import sleep
from utilities import ( # pylint: disable=import-error
run_validator_proxy,
unzip_file,
parse_proxy_hosts,
get_tes,
)
from base_logger import logger


class Apigee:
Expand All @@ -36,16 +43,16 @@ def __init__(
self.auth_type = auth_type
access_token = self.get_access_token()
self.auth_header = {
"Authorization": "Bearer {}".format(access_token)
"Authorization": f"Bearer {access_token}"
if self.auth_type == "oauth"
else "Basic {}".format(access_token) # noqa
else f"Basic {access_token}" # noqa
}

def is_token_valid(self, token):
url = f"https://www.googleapis.com/oauth2/v1/tokeninfo?access_token={token}" # noqa
response = requests.get(url)
if response.status_code == 200:
print(f"Token Validated for user {response.json()['email']}")
logger.info(f"Token Validated for user {response.json()['email']}")
return True
return False

Expand All @@ -60,19 +67,15 @@ def get_access_token(self):
if self.is_token_valid(token):
return token
else:
print(
'please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ' # noqa type: ignore
)
logger.error('please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ') # noqa
sys.exit(1)
else:
return token
else:
if self.apigee_type == "x":
print(
'please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ' # noqa
)
logger.error('please run "export APIGEE_ACCESS_TOKEN=$(gcloud auth print-access-token)" first !! ') # noqa
else:
print("please export APIGEE_OPDK_ACCESS_TOKEN")
logger.error('please export APIGEE_OPDK_ACCESS_TOKEN')
sys.exit(1)

def set_auth_header(self):
Expand Down Expand Up @@ -136,7 +139,7 @@ def create_api(self, api_name, proxy_bundle_path):
if response.status_code == 200:
revision = response.json().get('revision', "1")
return True, revision
print(response.text)
logger.debug(response.text)
return False, None

def get_api_revisions_deployment(self, env, api_name, api_rev): # noqa
Expand All @@ -154,10 +157,10 @@ def get_api_revisions_deployment(self, env, api_name, api_rev): # noqa
if self.apigee_type == "opdk":
if api_deployment_status == "deployed":
return True
print(f"API {api_name} is in Status: {api_deployment_status} !") # noqa
logger.debug(f"API {api_name} is in Status: {api_deployment_status} !") # noqa
return False
else:
print(response.text)
logger.debug(response.text)
return False

def deploy_api(self, env, api_name, api_rev):
Expand All @@ -171,9 +174,9 @@ def deploy_api(self, env, api_name, api_rev):
else:
resp = response.json()
if "already deployed" in resp["error"]["message"]:
print("Proxy {} is already Deployed".format(api_name))
logger.info(f"Proxy {api_name} is already Deployed")
return True
print(response.text)
logger.debug(f"{response.text}")
return False

def deploy_api_bundle(self, env, api_name, proxy_bundle_path, api_force_redeploy=False): # noqa
Expand All @@ -185,50 +188,38 @@ def deploy_api_bundle(self, env, api_name, proxy_bundle_path, api_force_redeploy
if get_api_status:
api_exists = True
api_rev = api_revs[-1]
print(
f"Proxy with name {api_name} with revision {api_rev} already exists in Apigee Org {self.org}" # noqa
)
logger.warning(f"Proxy with name {api_name} with revision {api_rev} already exists in Apigee Org {self.org}") # noqa
if api_force_redeploy:
api_exists = False
if not api_exists:
api_created, api_rev = self.create_api(api_name, proxy_bundle_path)
if api_created:
print(
f"Proxy has been imported with name {api_name} in Apigee Org {self.org}" # noqa
)
logger.info(f"Proxy has been imported with name {api_name} in Apigee Org {self.org}") # noqa
api_exists = True
else:
print(f"ERROR : Proxy {api_name} import failed !!! ")
logger.error(f"ERROR : Proxy {api_name} import failed !!! ")
return False
if api_exists:
if self.get_api_revisions_deployment(
env, api_name, api_rev
):
print(f"INFO : Proxy {api_name} already active in to {env} in Apigee Org {self.org} !") # noqa
logger.info(f"Proxy {api_name} already active in to {env} in Apigee Org {self.org} !") # noqa
return True
else:
if self.deploy_api(env, api_name, api_rev):
print(
f"Proxy with name {api_name} has been deployed to {env} in Apigee Org {self.org}" # noqa
)
logger.info(f"Proxy with name {api_name} has been deployed to {env} in Apigee Org {self.org}") # noqa
while api_deployment_retry_count < api_deployment_retry:
if self.get_api_revisions_deployment(
env, api_name, api_rev
):
print(
f"Proxy {api_name} active in runtime after {api_deployment_retry_count*api_deployment_sleep} seconds " # noqa
)
logger.debug(f"Proxy {api_name} active in runtime after {api_deployment_retry_count*api_deployment_sleep} seconds ") # noqa
return True
else:
print(
f"Checking API deployment status in {api_deployment_sleep} seconds" # noqa
)
logger.debug(f"Checking API deployment status in {api_deployment_sleep} seconds") # noqa
sleep(api_deployment_sleep)
api_deployment_retry_count += 1
else:
print(
f"ERROR : Proxy deployment to {env} in Apigee Org {self.org} Failed !!" # noqa
)
logger.error(f"ERROR : Proxy deployment to {env} in Apigee Org {self.org} Failed !!") # noqa
return False

def get_api_vhost(self, vhost_name, env):
Expand All @@ -244,13 +235,11 @@ def get_api_vhost(self, vhost_name, env):
else:
hosts = response.json()["hostnames"]
if len(hosts) == 0:
print(
f"ERROR: Vhost/Env Group {vhost_name} contains no domains" # noqa
)
logger.error(f"Vhost/Env Group {vhost_name} contains no domains") # noqa
return None
return hosts
else:
print(f"ERROR: Vhost/Env Group {vhost_name} contains no domains") # noqa
logger.error(f"Vhost/Env Group {vhost_name} contains no domains")
return None

def list_apis(self, api_type):
Expand Down Expand Up @@ -289,6 +278,43 @@ def fetch_api_revision(self, api_type, api_name, revision, export_dir): # noqa
return True
return False

def fetch_api_proxy_ts_parallel(self, arg_tuple):
    """Download one proxy revision and extract its target hosts.

    Designed to be mapped over a pool for parallel execution, hence the
    single packed argument.

    Args:
        arg_tuple: (api_type, api_name, revision, export_dir).

    Returns:
        tuple: (api_type, api_name, parsed_proxy_hosts, proxy_tes) where
        parsed_proxy_hosts is the host data extracted from the proxy
        bundle and proxy_tes the target servers referenced by it.
    """
    # Unpack once instead of indexing arg_tuple throughout.
    api_type, api_name, revision, export_dir = arg_tuple
    # NOTE(review): the boolean result of fetch_api_revision is ignored;
    # a failed download will surface later as an unzip/parse error.
    self.fetch_api_revision(api_type, api_name, revision, export_dir)
    bundle_zip = f"{export_dir}/{api_name}.zip"
    extract_dir = f"{export_dir}/{api_name}"
    unzip_file(bundle_zip, extract_dir)
    parsed_proxy_hosts = parse_proxy_hosts(f"{extract_dir}/apiproxy")
    proxy_tes = get_tes(parsed_proxy_hosts)
    return api_type, api_name, parsed_proxy_hosts, proxy_tes

def fetch_env_target_servers_parallel(self, arg_tuple):
    """Fetch one target server's definition; pool-friendly wrapper.

    Args:
        arg_tuple: (env, target_server_name).

    Returns:
        tuple: (target_server_name, target_server_info).
    """
    env, ts_name = arg_tuple
    return ts_name, self.get_target_server(env, ts_name)

def call_validator_proxy_parallel(self, arg_tuple):
    """Call the validator proxy for one batch and build report rows.

    Args:
        arg_tuple: positional bundle where items [0..4] are forwarded
            verbatim to run_validator_proxy and item [5] is a dict
            mapping target-server name -> list of APIs referencing it.

    Returns:
        list: rows of [name, extracted_from, host, port, env, status,
        info], or None when the proxy call failed (the error is logged).
    """
    response = run_validator_proxy(arg_tuple[0], arg_tuple[1], arg_tuple[2], arg_tuple[3], arg_tuple[4])  # noqa
    if not isinstance(response, list):
        logger.error(f"Error while calling the validator proxy - {response.get('error','unknown error')}")  # noqa
        return None

    ts_references = arg_tuple[5]
    report = []
    for output in response:
        # Prefer info supplied by the proxy itself; otherwise list the
        # APIs that reference this target server, if any are known.
        if output.get("info"):
            info = output["info"]
        elif output["name"] in ts_references:
            info = " & ".join(set(ts_references[output["name"]]))
        else:
            info = "No References in any API"
        report.append(
            [
                output["name"],
                output["extracted_from"],
                output["host"],
                output["port"],
                output["env"],
                output["status"],
                info,
            ]
        )
    return report

def write_proxy_bundle(self, export_dir, file_name, data):
file_path = f"./{export_dir}/{file_name}.zip"
with open(file_path, "wb") as fl:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,6 @@
-->
<AssignMessage name="AM-Set-Json-Response">
<Set>
<Payload contentType="application/json">{
"host":"{request.header.host_name}",
"port": "{request.header.port_number}",
"status":"{flow.reachableStatus}"
}
</Payload>
<Payload contentType="application/json">{flow.result}</Payload>
</Set>
</AssignMessage>
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
  Guards the validator proxy against oversized payloads: rejects any request
  whose JSON arrays contain more than 5 elements, capping the number of
  host/port entries a caller may submit in a single request.
-->
<JSONThreatProtection continueOnError="false" enabled="true" name="JSON-Threat-Protection">
<DisplayName>JSON Threat Protection</DisplayName>
<Properties/>
<ArrayElementCount>5</ArrayElementCount>
<Source>request</Source>
</JSONThreatProtection>
3 changes: 3 additions & 0 deletions tools/target-server-validator/apiproxy/proxies/default.xml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@
<ProxyEndpoint name="default">
<PreFlow name="PreFlow">
<Request>
<Step>
<Name>JSON-Threat-Protection</Name>
</Step>
<Step>
<Name>JC-Port-Open-Check</Name>
</Step>
Expand Down
49 changes: 49 additions & 0 deletions tools/target-server-validator/base_logger.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
import os
import logging

# Runtime configuration, overridable via environment variables.
# EXEC_INFO: presumably passed as exc_info to log calls — confirm at call sites.
EXEC_INFO = os.getenv("EXEC_INFO") == "True"
LOG_HANDLER = os.getenv("LOG_HANDLER", "Stream")  # "File" or "Stream"
LOG_FILE_PATH = os.getenv("LOG_FILE_PATH", "app.log")  # used when LOG_HANDLER == "File"
LOGLEVEL = os.getenv('LOGLEVEL', 'INFO').upper()

# Silently fall back to safe defaults for unrecognized values.
if LOG_HANDLER not in {"File", "Stream"}:
    LOG_HANDLER = "Stream"

if LOGLEVEL not in {"CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"}:
    LOGLEVEL = "INFO"


class CustomFormatter(logging.Formatter):
    """Colorize log lines by severity using ANSI escape codes.

    Each standard level maps to a colored variant of the same base
    format; records with custom (non-standard) level numbers fall back
    to the uncolored base format instead of losing all detail.
    """

    grey = "\x1b[38;20m"
    yellow = "\x1b[33;20m"
    red = "\x1b[31;20m"
    bold_red = "\x1b[31;1m"
    reset = "\x1b[0m"
    logging_format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)"  # noqa

    # Level number -> colored format string.
    FORMATS = {
        logging.DEBUG: grey + logging_format + reset,
        logging.INFO: grey + logging_format + reset,
        logging.WARNING: yellow + logging_format + reset,
        logging.ERROR: red + logging_format + reset,
        logging.CRITICAL: bold_red + logging_format + reset
    }

    def format(self, record):
        """Render *record* with level-appropriate coloring.

        Fix: previously an unknown level number produced
        logging.Formatter(None) (bare "%(message)s"); now it falls back
        to the full uncolored format.
        """
        log_fmt = self.FORMATS.get(record.levelno, self.logging_format)
        return logging.Formatter(log_fmt).format(record)


# Module-level singleton logger shared by the validator scripts.
logger = logging.getLogger("TargetServerValidator")
# LOGLEVEL was normalized above, so getattr cannot fail here.
logger.setLevel(getattr(logging, LOGLEVEL))

# Route output to a file or to the console, per LOG_HANDLER.
if LOG_HANDLER == "File":
    ch = logging.FileHandler(LOG_FILE_PATH, mode="a")
else:
    ch = logging.StreamHandler()

ch.setFormatter(CustomFormatter())

logger.addHandler(ch)
23 changes: 15 additions & 8 deletions tools/target-server-validator/callout/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.7</java.version>
<java.version>1.8</java.version>
<apiproxy.java.rsrc.dir>../apiproxy/resources/java</apiproxy.java.rsrc.dir>
<testng.version>6.8.7</testng.version> <!-- current: 6.9.4 -->
<jmockit.version>1.7</jmockit.version>
Expand Down Expand Up @@ -70,7 +70,11 @@ do it manually by running these commands:
<artifactId>expressions</artifactId>
<version>1.0.0</version>
</dependency>

<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.10.1</version>
</dependency>
</dependencies>

<build>
Expand Down Expand Up @@ -134,18 +138,21 @@ do it manually by running these commands:
<execution>
<phase>package</phase>
<configuration>
<tasks>
<target>
<copy todir="${apiproxy.java.rsrc.dir}">
<fileset dir="${project.build.directory}">
<include name="${project.artifactId}.jar"/>
<!-- copy any required dependencies tht are not
already included in the Edge runtime: -->

<!-- -->
</fileset>
<mapper type="flatten"/>
</copy>
</tasks>

<copy todir="${apiproxy.java.rsrc.dir}">
<fileset dir="${project.build.directory}/lib">
<include name="gson-2.10.1.jar"/>
</fileset>
<mapper type="flatten"/>
</copy>
</target>
</configuration>
<goals>
<goal>run</goal>
Expand Down
Loading
Loading