Skip to content

Commit

Permalink
Simplifications and improvements: reduced toml nesting, only reset ID…
Browse files Browse the repository at this point in the history
…s we generate, remove tomli and requests as dependencies

Signed-off-by: Caroline Russell <[email protected]>
  • Loading branch information
cerrussell committed Oct 14, 2023
1 parent 7188180 commit de8e8e0
Show file tree
Hide file tree
Showing 5 changed files with 109 additions and 138 deletions.
13 changes: 5 additions & 8 deletions contrib/csaf.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,9 @@ namespace = "https://appthreat.com"
# Optional
contact_details = "[email protected]"

[notes]
# Category and text are required to add a note, add as many [[note]] entries as you need
# Valid categories are: description, details, faq, general, legal_disclaimer, other, summary
[[notes.note]]
[[note]]
audience = ""
category = ""
text = ""
Expand All @@ -29,15 +28,14 @@ text = ""
label = ""
url = ""

[references]
# Summary and url are required, include as many [[reference]] entries as you like
# Category can be either "external" or "self"
[[references.ref]]
[[reference]]
category = ""
summary = ""
url = ""

[[references.ref]]
[[reference]]
category = ""
summary = ""
url = ""
Expand All @@ -53,9 +51,8 @@ initial_release_date = "" # ISO 8601
status = "draft"
version = ""

[tracking.revision_history]
# Include as many revision_history.revision entries as you like
[[tracking.revision_history.revision]]
# Include as many revision entries as you like
[[tracking.revision]]
date = ""
number = ""
summary = ""
Expand Down
14 changes: 7 additions & 7 deletions depscan/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,13 @@ def build_args():
help="Cache vulnerability information in platform specific "
"user_data_dir",
)
parser.add_argument(
"--csaf",
action="store_true",
default=False,
dest="csaf",
help="Generate a CSAF",
)
parser.add_argument(
"--sync",
action="store_true",
Expand Down Expand Up @@ -260,13 +267,6 @@ def build_args():
action="version",
version="%(prog)s " + get_version(),
)
parser.add_argument(
"--csaf",
action="store_true",
default=False,
dest="csaf",
help="Generate a CSAF",
)
return parser.parse_args()


Expand Down
133 changes: 74 additions & 59 deletions depscan/lib/csaf.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,11 @@
from copy import deepcopy
from datetime import datetime
from json import JSONDecodeError
from urllib import request

from requests import HTTPError
from urllib.error import HTTPError

import requests
import toml
import tomli

from depscan.lib.logger import LOG

Expand Down Expand Up @@ -1083,14 +1082,14 @@
}

TOML_TEMPLATE = (
"https://raw.githubusercontent.com/owasp-dep-scan/dep-scan/master/contrib"
"/csaf.toml"
"https://raw.githubusercontent.com/owasp-dep-scan/dep-scan/master"
"/contrib/csaf.toml"
)

ref_map = {
r"cve-[0-9]{4,}-[0-9]{4,}$": "CVE Record",
r"(?<=bugzilla.)\S+(?=.\w{3}/show_bug.cgi\?id=)": "Bugzilla",
r"https://github.com/([\w\d\-.]+/[\w\d\-.]+/security/)?advisories":
r"https://github.com/([\w\d\-.]+/[\w\d\-.]+/security/)?advisories":
"GitHub Advisory",
r"https://github.com/[\w\d\-.]+/[\w\d\-.]+/pull/\d+": "GitHub Pull Request",
r"https://github.com/[\w\d\-.]+/[\w\d\-.]+/commit": "GitHub Commit",
Expand Down Expand Up @@ -1296,7 +1295,6 @@ def format_references(ref):
list: A list of dictionaries with the formatted references.
"""
fmt_refs = [{"summary": get_ref_summary(r), "url": r} for r in ref]
refs = []
ids = []
github_advisory_regex = re.compile(r"GHSA-\w{4}-\w{4}-\w{4}$")
github_issue_regex = re.compile(r"(?<=issues/)\d+")
Expand All @@ -1305,6 +1303,7 @@ def format_references(ref):
)
bugzilla_id_regex = re.compile(r"(?<=show_bug.cgi\?id=)\d+")
redhat_advisory_regex = re.compile(r"RH[BS]A-\d{4}:\d+")
refs = []
for reference in fmt_refs:
r = reference["url"]
summary = reference["summary"]
Expand All @@ -1324,16 +1323,17 @@ def format_references(ref):
)
elif summary == "Bugzilla":
new_id = {
"system_name": bugzilla_regex.findall(r)[0].capitalize()
+ " Bugzilla ID",
"system_name": f"{bugzilla_regex.findall(r)[0].capitalize()}"
f" Bugzilla ID",
"text": bugzilla_id_regex.findall(r)[0],
}
if new_id["system_name"] == "Redhat Bugzilla ID":
new_id["system_name"] = "Red Hat Bugzilla ID"
ids.append(new_id)
elif summary == "Red Hat Security Advisory" or summary == (
"Red Hat Bug Fix Advisory"
):
elif summary in [
"Red Hat Security Advisory",
"Red Hat Bug Fix Advisory",
]:
ids.append(
{
"system_name": summary,
Expand Down Expand Up @@ -1378,47 +1378,53 @@ def parse_revision_history(tracking):
Returns:
dict: The updated tracking object with the parsed revision history.
"""
hx = deepcopy(tracking["revision_history"])
status = tracking["status"]
rev = bool(tracking["revision_history"].get("revision", None))
hx = deepcopy(tracking.get("revision", []))
if len(hx) > 0:
hx = cleanup_list(hx)
status = tracking.get("status")
if not status or len(status) == 0:
status = "draft"
rev = bool(len(hx))
comb = "-".join([status, str(rev)])
dt = get_date()
tracking = cleanup_dict(tracking)
tracking["current_release_date"] = tracking.get("current_release_date", dt)
tracking["initial_release_date"] = tracking.get("initial_release_date", dt)
if not tracking.get("id"):
LOG.warning("No tracking id, generating one.")
tracking["id"] = f"{dt}_v{tracking['version']}"
if comb in {"draft-False", "final-False", "interim-False"}:
hx["revision"] = [
hx.append(
{
"date": tracking["initial_release_date"],
"number": "1",
"summary": "Initial"
if comb == "final-False"
else "Initial [draft]",
}
]
)
elif comb == "final-True":
hx["revision"] = sorted(hx["revision"], key=lambda x: x["number"])
tracking["initial_release_date"] = hx["revision"][0]["date"]
if hx["revision"][0]["summary"] == "Initial [draft]":
hx["revision"][0]["summary"] = "Initial"
hx = sorted(hx, key=lambda x: x["number"])
tracking["initial_release_date"] = hx[0]["date"]
if hx[0]["summary"] == "Initial [draft]":
hx[0]["summary"] = "Initial"
else:
if (
tracking["current_release_date"]
== hx["revision"][-1]["date"]
):
if tracking["current_release_date"] == hx[-1]["date"]:
tracking["current_release_date"] = dt
hx["revision"].append(
hx.append(
{
"date": tracking["current_release_date"],
"number": str(len(hx["revision"]) + 1),
"number": str(len(hx) + 1),
"summary": "Update",
}
)
hx["revision"].reverse()
tracking["version"] = max(tracking["version"], hx["revision"][0]["number"])
tracking["revision_history"] = hx
hx.reverse()

tracking["version"] = str(
max(int(tracking.get("version", 0)), int(hx[0]["number"]))
)
if not tracking.get("id") or len(tracking.get("id")) == 0:
LOG.warning("No tracking id, generating one.")
tracking["id"] = f"{dt}_v{tracking['version']}"
tracking["revision"] = hx
tracking["status"] = status
return tracking


Expand All @@ -1437,12 +1443,18 @@ def import_product_tree(tree):
try:
with open(tree["easy_import"], "r") as f:
product_tree = json.load(f)
except [FileNotFoundError, JSONDecodeError]:
except JSONDecodeError:
LOG.warning(
"Unable to load product tree file. Please verify that your "
"product tree is a valid json file. Visit "
"https://github.com/owasp-dep-scan/dep-scan/blob/master/test/data"
"/product_tree.json for an example."
"https://github.com/owasp-dep-scan/dep-scan/blob/master/test"
"/data/product_tree.json for an example."
)
except FileNotFoundError:
LOG.warning(
"Cannot locate product tree at %s. Please verify you "
"have entered the correct filepath in your csaf.toml.",
tree["easy_import"],
)
return product_tree

Expand All @@ -1464,9 +1476,9 @@ def parse_toml(metadata):
"""
tracking = parse_revision_history(metadata["tracking"])
refs = []
[refs.append(v) for k, v in metadata["references"].items()]
[refs.append(v) for v in metadata["reference"]]
notes = []
[notes.append(v) for k, v in metadata["notes"].items()]
[notes.append(v) for v in metadata["note"]]
product_tree = import_product_tree(metadata["product_tree"])
output = {
"document": {
Expand All @@ -1476,7 +1488,7 @@ def parse_toml(metadata):
"csaf_version": "2.0",
"distribution": metadata["distribution"],
"lang": "en",
"notes": notes[0],
"notes": notes,
"publisher": {
"category": metadata["publisher"]["category"],
"contact_details": metadata["publisher"]["contact_details"],
Expand Down Expand Up @@ -1549,12 +1561,12 @@ def export_csaf(results, src_dir, reports_dir):
)
template["document"]["aggregate_severity"]["text"] = agg_severity
new_results = cleanup_dict(template)
# We want to preserve revision_history.revision for the TOML
metadata["tracking"] = deepcopy(new_results["document"]["tracking"])
metadata["tracking"]["id"] = ""
new_results["document"]["tracking"]["revision_history"] = new_results[
"document"
]["tracking"]["revision_history"]["revision"]
# Reset the id if it's one we've generated
if re.match(
r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}_v", metadata["tracking"]["id"]
):
metadata["tracking"]["id"] = ""
outfile = os.path.join(
reports_dir,
f"csaf_v{new_results['document']['tracking']['version']}.json",
Expand All @@ -1577,17 +1589,20 @@ def import_csaf_toml(toml_file_path):
Raises:
TOMLDecodeError: If the TOML file contains duplicate keys or is invalid.
"""
with open(toml_file_path, "rb") as f:
try:
data = tomli.load(f)
except tomli.TOMLDecodeError as e:
LOG.error(
"Invalid TOML. Please make sure you do not have any duplicate "
"keys and that any filepaths are properly escaped if using "
"Windows."
)
exit(1)
return data
try:
with open(toml_file_path, "r") as f:
try:
return toml.load(f)
except toml.TomlDecodeError:
LOG.error(
"Invalid TOML. Please make sure you do not have any "
                "duplicate keys and that any filepaths are properly "
                "escaped if using Windows."
)
exit(1)
except FileNotFoundError:
download_toml_template(toml_file_path)
return import_csaf_toml(toml_file_path)


def download_toml_template(fn):
Expand All @@ -1603,17 +1618,15 @@ def download_toml_template(fn):
"""
try:
response = requests.get(TOML_TEMPLATE)
if response.status_code == 200:
with open(fn, "wb") as file:
file.write(response.content)
request.urlretrieve(TOML_TEMPLATE, fn)
except HTTPError:
LOG.error(
"Could not retrieve the CSAF toml template. Please visit "
"our repo at https://github.com/owasp-dep-scan/dep-scan and "
"manually download the csaf.toml file located in the contrib "
f"directory to {fn}."
)
exit(1)


def cleanup_list(d):
Expand Down Expand Up @@ -1661,3 +1674,5 @@ def cleanup_dict(d):
if entry:
new_dict[key] = entry
return new_dict


2 changes: 0 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,7 @@ dependencies = [
"PyYAML",
"rich",
"quart",
"requests",
"toml",
"tomli",
]

requires-python = ">=3.8"
Expand Down
Loading

0 comments on commit de8e8e0

Please sign in to comment.