Skip to content

Commit

Permalink
Merge pull request CactuseSecurity#2463 from tpurschke/develop
Browse files Browse the repository at this point in the history
Develop app & network importer adjustments
  • Loading branch information
tpurschke authored Jun 25, 2024
2 parents fb295f3 + fafb5b3 commit 0829706
Show file tree
Hide file tree
Showing 3 changed files with 256 additions and 28 deletions.
12 changes: 6 additions & 6 deletions scripts/customizing/modelling/convertNwObjDataExample.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,12 +115,12 @@ def generatePublicIPv4NetworksAsInternetArea():
# read config
subnetDataFilename = readConfig(args.config, ['subnetData'])[0]

try:
with open(subnetDataFilename, "r") as subnetFH:
subnets = (subnetFH.readlines())
except:
logger.error("error while trying to read subnets from csv file '" + subnetDataFilename + "', exception: " + str(traceback.format_exc()))
sys.exit(1)
# try:
# with open(subnetDataFilename, "r") as subnetFH:
# subnets = (subnetFH.readlines())
# except:
# logger.error("error while trying to read subnets from csv file '" + subnetDataFilename + "', exception: " + str(traceback.format_exc()))
# sys.exit(1)

# normalizing subnet data

Expand Down
220 changes: 220 additions & 0 deletions scripts/customizing/modelling/convertNwObjDataFromGit.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,220 @@
#!/usr/bin/python3

# script for converting network object (subnet/zone/area) data from a git-hosted IPAM CSV export
from asyncio.log import logger
import traceback
from textwrap import indent
import requests.packages
import requests
import json
import sys
import argparse
import logging
from sys import stdout
import ipaddress
import os
from pathlib import Path
import git # apt install python3-git # or: pip install git
import csv

defaultConfigFilename = "/usr/local/fworch/etc/secrets/customizingConfig.json"
ipamGitRepoTargetDir = "ipamRepo"


def getLogger(debug_level_in=0):
    """Configure and return the root logger for console output.

    debug_level_in: 0 selects INFO, >=1 selects DEBUG; values above 8
    additionally log the chosen debug level itself.
    """
    debug_level = int(debug_level_in)
    llevel = logging.DEBUG if debug_level >= 1 else logging.INFO

    logger = logging.getLogger()  # use root logger
    logformat = "%(asctime)s [%(levelname)-5.5s] [%(filename)-10.10s:%(funcName)-10.10s:%(lineno)4d] %(message)s"
    logging.basicConfig(format=logformat, datefmt="%Y-%m-%dT%H:%M:%S%z", level=llevel)
    logger.setLevel(llevel)

    # silence the noisy urllib3 connection pool module (raise its level to WARNING)
    connection_log = logging.getLogger("urllib3.connectionpool")
    connection_log.setLevel(logging.WARNING)
    connection_log.propagate = True

    if debug_level > 8:
        logger.debug("debug_level=" + str(debug_level))
    return logger


def readConfig(configFilename, keysToGet=['username', 'password', 'ldapPath', 'apiBaseUri']):
    """Read the JSON config file and return the values for keysToGet, in order.

    Exits the process with code 1 (after logging the traceback) when the
    file cannot be opened/parsed or a requested key is missing.
    """
    # keysToGet is never mutated, so the mutable default is harmless here
    try:
        with open(configFilename, "r") as customConfigFH:
            customConfig = json.load(customConfigFH)  # parse directly from the stream
        return [customConfig[key] for key in keysToGet]
    # narrowed from a bare except: file errors, malformed JSON, missing keys
    except (OSError, ValueError, KeyError):
        logger.error("could not read config file " + configFilename + ", Exception: " + str(traceback.format_exc()))
        sys.exit(1)


def getNetworkBorders(ip):
    """Return (first_ip, last_ip, type) for an address or CIDR string.

    A value containing '/' is treated as a network (non-strict, host bits
    are tolerated) and yields its network/broadcast addresses with type
    'network'; anything else is treated as a single host.
    Generalized from IPv4Network to ip_network so IPv6 CIDRs work too
    (backward compatible for all IPv4 inputs).
    """
    if '/' in ip:
        network = ipaddress.ip_network(ip, strict=False)
        return str(network.network_address), str(network.broadcast_address), 'network'
    return str(ip), str(ip), 'host'


def extractSocketInfo(asset, services):
    """Collect {'ip', 'ip-end', 'type'} socket dicts from an asset record.

    Reads addresses from asset['assets']['values'] and from the 'values'
    list of each entry in asset['objects']; the services argument is
    accepted for interface compatibility but currently ignored.
    """
    def _socket(addr):
        start, end, kind = getNetworkBorders(addr)
        return {"ip": start, "ip-end": end, "type": kind}

    sockets = []
    if 'assets' in asset and 'values' in asset['assets']:
        sockets.extend(_socket(ip) for ip in asset['assets']['values'])
    for obj in asset.get('objects', []):
        sockets.extend(_socket(cidr) for cidr in obj.get('values', []))
    return sockets


def generatePublicIPv4NetworksAsInternetArea():
    """Return the public IPv4 space as a list of {'ip': cidr, 'name': 'inet'} dicts.

    The CIDR list appears to be the complement of the RFC1918 private
    ranges (10/8, 172.16/12, 192.168/16) — NOTE(review): loopback and
    link-local ranges are not excluded; confirm that is intended.
    """
    internetSubnets = ['0.0.0.0/5', '8.0.0.0/7', '11.0.0.0/8', '12.0.0.0/6', '16.0.0.0/4', '32.0.0.0/3', '64.0.0.0/2',
                       '128.0.0.0/3', '160.0.0.0/5', '168.0.0.0/6', '172.0.0.0/12', '172.32.0.0/11', '172.64.0.0/10',
                       '172.128.0.0/9', '173.0.0.0/8', '174.0.0.0/7', '176.0.0.0/4', '192.0.0.0/9', '192.128.0.0/11',
                       '192.160.0.0/13', '192.169.0.0/16', '192.170.0.0/15', '192.172.0.0/14', '192.176.0.0/12',
                       '192.192.0.0/10', '193.0.0.0/8', '194.0.0.0/7', '196.0.0.0/6', '200.0.0.0/5', '208.0.0.0/4',
                       '224.0.0.0/3']
    return [{'ip': net, 'name': 'inet'} for net in internetSubnets]


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='Read configuration from FW management via API calls')
    parser.add_argument('-c', '--config', default=defaultConfigFilename,
                        help='Filename of custom config file for modelling imports')
    parser.add_argument('-l', '--limit', metavar='api_limit', default='150',
                        help='The maximal number of returned results per HTTPS Connection; default=150')

    args = parser.parse_args()

    logger = getLogger(debug_level_in=2)

    # read config in one pass instead of re-opening the config file per key
    subnetDataBasename, ipamGitRepo, ipamGitUser, ipamGitPassword = readConfig(
        args.config, ['subnetData', 'ipamGitRepo', 'ipamGitUser', 'ipamGitPassword'])
    subnetDataFilename = ipamGitRepoTargetDir + '/' + subnetDataBasename

    try:
        # get ipam repo: pull when already cloned, otherwise clone with credentials
        if os.path.exists(ipamGitRepoTargetDir):
            repo = git.Repo(ipamGitRepoTargetDir)
            repo.remotes.origin.pull()
        else:
            # NOTE(review): credentials become part of the stored remote URL
            repoUrl = "https://" + ipamGitUser + ":" + ipamGitPassword + "@" + ipamGitRepo
            repo = git.Repo.clone_from(repoUrl, ipamGitRepoTargetDir)
    except Exception:
        logger.error("error while trying to access git repo '" + ipamGitRepo + "', exception: " + str(traceback.format_exc()))
        sys.exit(1)

    # read raw subnet rows from the CSV export
    subnetAr = []
    try:
        with open(subnetDataFilename, 'r') as file:
            for row in csv.DictReader(file):
                subnetAr.append(row)
    except Exception:
        logger.error("error while trying to read subnet csv file '" + subnetDataFilename + "', exception: " + str(traceback.format_exc()))
        sys.exit(1)

    # normalize subnet data into subnets/zones/areas lookup tables
    normSubnetData = {"subnets": {}, "zones": {}, "areas": {}}
    snId = 0

    for subnet in subnetAr:
        # ignore all "reserved" subnets whose name starts with "RES"
        if subnet['Subnetzname'].startswith('RES'):
            continue
        naId = subnet['Subnetzname'][2:4]  # area id taken from chars 3+4 of the name
        # NOTE(review): strict network parsing — a CSV row with host bits set
        # would raise an uncaught ValueError here
        cidr = str(ipaddress.ip_network(subnet['Subnetzadresse'] + '/' + subnet['Subnetzmaske']))

        # expected name format: <prefix>.<zoneName>[.<subnetName>...]
        nameParts = subnet['Subnetzname'].split('.')
        if len(nameParts) <= 1:
            logger.warning("ignoring malformed network entry for net " + subnet['Subnetzadresse'] + ", subnetname: " + subnet['Subnetzname'])
            continue
        zoneName = nameParts[1]
        subnetName = nameParts[2] if len(nameParts) >= 3 else ""

        # nameParts[0] cannot contain '.' anymore, so split on '_' directly
        # (the original's extra split('.') was a no-op)
        zoneNamePartsUnderscore = nameParts[0].split('_')
        zoneId = zoneNamePartsUnderscore[0][2:7]
        areaName = '_'.join(zoneNamePartsUnderscore[1:])

        normSubnetData['subnets'][snId] = {
            "na-id": naId,
            "na-name": areaName,
            "zone-id": zoneId,
            "zone-name": zoneName,
            "ip": cidr,
            "name": subnetName
        }
        snId += 1

        # filling areas
        if naId not in normSubnetData['areas']:
            normSubnetData['areas'][naId] = {"area-name": areaName, "area-id": naId, "subnets": [], "zones": []}
        normSubnetData['areas'][naId]['subnets'].append({"ip": cidr, "name": subnetName})
        normSubnetData['areas'][naId]['zones'].append({"zone-id": zoneId, "zone-name": zoneName})

        # filling zones
        if zoneId not in normSubnetData['zones']:
            normSubnetData['zones'][zoneId] = {"zone-name": zoneName, "subnets": []}
        normSubnetData['zones'][zoneId]['subnets'].append({"ip": cidr, "name": subnetName})

    # transform into the output structure (list of areas)
    transfSubnetData = {"areas": []}
    for area in normSubnetData['areas'].values():
        transfSubnetData['areas'].append({
            "name": area['area-name'],
            "id_string": "NA" + area['area-id'],
            "subnets": area['subnets']
        })

    # add Internet as NA00_Internet
    transfSubnetData['areas'].append({
        'name': 'Internet',
        'id_string': 'NA00',
        'subnets': generatePublicIPv4NetworksAsInternetArea()})
    # open: what about ipv6 addresses?
    # open: what about the companies own public ip addresses - should they be excluded here?

    # write result next to this script as <scriptname>.json
    path = os.path.dirname(__file__)
    fileOut = path + '/' + Path(os.path.basename(__file__)).stem + ".json"
    logger.info("dumping into file " + fileOut)
    with open(fileOut, "w") as outFH:
        json.dump(transfSubnetData, outFH, indent=3)
    sys.exit(0)
52 changes: 30 additions & 22 deletions scripts/customizing/modelling/getOwnersFromMultipleSources.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,14 @@
# username
# password
# apiBaseUri # Tufin API, e.g. "https://tufin.domain.com/"
# rlmVersion # Tufin RLM Version (API breaking change in 2.6)
# git
# gitRepoUrl
# gitusername
# gitpassword
# csvFiles # array of file basenames containing the app data
# ldapPath # full ldap user path (used for building DN from user basename)


from asyncio.log import logger
import traceback
from textwrap import indent
Expand All @@ -35,7 +35,6 @@
import csv



baseDir = "/usr/local/fworch/"
baseDirEtc = baseDir + "etc/"
repoTargetDir = baseDirEtc + "cmdb-repo"
Expand Down Expand Up @@ -76,18 +75,16 @@ def __init__(self, message="API unavailable"):
super().__init__(self.message)


def readConfig(configFilename):
def readConfig(configFilename, keyToGet):
try:
with open(configFilename, "r") as customConfigFH:
customConfig = json.loads(customConfigFH.read())
return (customConfig['username'], customConfig['password'], customConfig['apiBaseUri'],
customConfig['ldapPath'],
customConfig['gitRepoUrl'], customConfig['gitusername'], customConfig['gitpassword'], customConfig['csvFiles'])
return customConfig[keyToGet]

except:
logger.error("could not read config file " + configFilename + ", Exception: " + str(traceback.format_exc()))
logger.error("could not read key '" + keyToGet + "' from config file " + configFilename + ", Exception: " + str(traceback.format_exc()))
sys.exit(1)


# read owners from json file on disk which where imported from RLM
def getExistingOwnerIds(ownersIn):
rlmOwners = []
Expand Down Expand Up @@ -181,9 +178,6 @@ def getLogger(debug_level_in=0):
logger = logging.getLogger() # use root logger
# logHandler = logging.StreamHandler(stream=stdout)
logformat = "%(asctime)s [%(levelname)-5.5s] [%(filename)-10.10s:%(funcName)-10.10s:%(lineno)4d] %(message)s"
# logHandler.setLevel(llevel)
# handlers = [logHandler]
# logging.basicConfig(format=logformat, datefmt="%Y-%m-%dT%H:%M:%S%z", handlers=handlers, level=llevel)
logging.basicConfig(format=logformat, datefmt="%Y-%m-%dT%H:%M:%S%z", level=llevel)
logger.setLevel(llevel)

Expand Down Expand Up @@ -215,10 +209,9 @@ def rlmLogin(user, password, api_url):
", status code: " + str(response))


def rlmGetOwners(token, api_url):
def rlmGetOwners(token, api_url, rlmVersion=2.5):

headers = {}
rlmVersion = 2.5

if rlmVersion < 2.6:
headers = {'Authorization': 'Bearer ' + token, 'Content-Type': 'application/json'}
Expand Down Expand Up @@ -262,7 +255,15 @@ def rlmGetOwners(token, api_url):
logger = getLogger(debug_level_in=2)

# read config
rlmUsername, rlmPassword, rlmApiUrl, ldapPath, gitRepoUrl, gitUsername, gitPassword, csvFiles = readConfig(args.config)
rlmUsername = readConfig(args.config, 'username')
rlmPassword = readConfig(args.config, 'password')
rlmApiUrl = readConfig(args.config, 'apiBaseUri')
ldapPath = readConfig(args.config, 'ldapPath')
gitRepoUrl = readConfig(args.config, 'ipamGitRepo')
gitUsername = readConfig(args.config, 'ipamGitUser')
gitPassword = readConfig(args.config, 'gitpassword')
rlmVersion = readConfig(args.config, 'rlmVersion')
csvFiles = readConfig(args.config, 'csvFiles')

######################################################
# 1. get all owners
Expand All @@ -279,18 +280,25 @@ def rlmGetOwners(token, api_url):
dfAllApps = []
for csvFile in csvFiles:
csvFile = repoTargetDir + '/' + csvFile # add directory to csv files
with open(csvFile, newline='') as csvFile:
reader = csv.reader(csvFile)
dfAllApps += list(reader)[1:]# Skip headers in first line

logger.info("#total aps: " + str(len(dfAllApps)))
try:
with open(csvFile, newline='') as csvFile:
reader = csv.reader(csvFile)
dfAllApps += list(reader)[1:]# Skip headers in first line
except:
logger.error("error while trying to read csv file '" + csvFile + "', exception: " + str(traceback.format_exc()))
sys.exit(1)

logger.info("#total apps: " + str(len(dfAllApps)))

# append all owners from CSV
for owner in dfAllApps:
appId = owner[1]
appName = owner[0]
appMainUser = owner[3]
if appId not in ownersById.keys():
if appId.lower().startswith('app-') or appId.lower().startswith('com-'):
mainUserDn = buildDN(owner[3], ldapPath)
mainUserDn = buildDN(appMainUser, ldapPath)
if mainUserDn=='':
logger.warning('adding app without main user: ' + appId)

Expand All @@ -299,7 +307,7 @@ def rlmGetOwners(token, api_url):
owner[1]:
{
"app_id_external": appId,
"name": owner[0],
"name": appName,
"main_user": mainUserDn,
"modellers": [],
"import_source": importSourceString,
Expand All @@ -324,7 +332,7 @@ def rlmGetOwners(token, api_url):
try:
oauthToken = rlmLogin(rlmUsername, rlmPassword, rlmApiUrl + api_url_path_rlm_login)
# logger.debug("token for RLM: " + oauthToken)
rlmOwnerData = rlmGetOwners(oauthToken, rlmApiUrl + api_url_path_rlm_apps)
rlmOwnerData = rlmGetOwners(oauthToken, rlmApiUrl + api_url_path_rlm_apps, float(rlmVersion))

except:
logger.error("error while getting owner data from RLM API: " + str(traceback.format_exc()))
Expand All @@ -345,7 +353,7 @@ def rlmGetOwners(token, api_url):
ownersById[appId]['modellers'] += users
ownersById[appId]['app_servers'] += extractSocketInfo(rlmOwner['asset'], rlmOwner['services'])
else:
logger.warning('got app-id from RLM which is not in main app export: ' + appId)
logger.info('ignorning (inactive) app-id from RLM which is not in main app export: ' + appId)

# 3. convert to normalized struct
normOwners = { "owners": [] }
Expand Down

0 comments on commit 0829706

Please sign in to comment.