Fix ANSSI scheme download.
J08nY committed Oct 2, 2024
1 parent 1773957 commit 2cd9093
Showing 1 changed file with 14 additions and 6 deletions.
20 changes: 14 additions & 6 deletions src/sec_certs/sample/cc_scheme.py
@@ -222,15 +222,23 @@ def get_canada_in_evaluation() -> list[dict[str, Any]]:


 def _get_france(url, enhanced, artifacts) -> list[dict[str, Any]]:  # noqa: C901
-    base_soup = _get_page(url)
+    session = requests.session()
+    challenge_soup = _get_page(constants.CC_ANSSI_BASE_URL, session=session)
+    bln_script = challenge_soup.find("head").find_all("script")[1]
+    bln_match = re.search(r"\"value\":\"([a-zA-Z0-9_-]+)\"", bln_script.string)
+    if not bln_match:
+        raise ValueError("Balleen challenge missing")
+    bln_value = bln_match.group(1)
+    session.cookies.set("bln_challengejs", bln_value, domain="cyber.gouv.fr")
+    base_soup = _get_page(url, session=session)
     pager = base_soup.find("nav", class_="pager")
     last_page_a = re.search("[0-9]+", pager.find("a", title="Aller à la dernière page").text)
     if not last_page_a:
         raise ValueError
     pages = int(last_page_a.group())
     results = []
     for page in range(pages + 1):
-        soup = _get_page(url + f"?page={page}")
+        soup = _get_page(url + f"?page={page}", session=session)
         for row in soup.find_all("article", class_="node--type-produit-certifie-cc"):
             cert: dict[str, Any] = {
                 "product": sns(row.find("h3").text),
@@ -255,7 +263,7 @@ def _get_france(url, enhanced, artifacts) -> list[dict[str, Any]]:  # noqa: C901
                     cert["expiration_date"] = value
             if enhanced:
                 e: dict[str, Any] = {}
-                cert_page = _get_page(cert["url"])
+                cert_page = _get_page(cert["url"], session=session)
                 infos = cert_page.find("div", class_="product-infos-wrapper")
                 for tr in infos.find_all("tr"):
                     label = tr.find("th").text
@@ -290,15 +298,15 @@ def _get_france(url, enhanced, artifacts) -> list[dict[str, Any]]:  # noqa: C901
if "Rapport de certification" in a.text:
e["report_link"] = urljoin(constants.CC_ANSSI_BASE_URL, a["href"])
if artifacts:
e["report_hash"] = _get_hash(e["report_link"]).hex()
e["report_hash"] = _get_hash(e["report_link"], session=session).hex()

elif "Cible de sécurité" in a.text:
e["target_link"] = urljoin(constants.CC_ANSSI_BASE_URL, a["href"])
if artifacts:
e["target_hash"] = _get_hash(e["target_link"]).hex()
e["target_hash"] = _get_hash(e["target_link"], session=session).hex()

elif "Certificat" in a.text:
e["cert_link"] = urljoin(constants.CC_ANSSI_BASE_URL, a["href"])
if artifacts:
e["cert_hash"] = _get_hash(e["cert_link"]).hex()
e["cert_hash"] = _get_hash(e["cert_link"], session=session).hex()

cert["enhanced"] = e
results.append(cert)
return results
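The first hunk works around the JavaScript challenge that cyber.gouv.fr serves before the certified-products listing: it fetches the base page once, pulls the challenge token out of the second <script> element in <head>, and replays it as the bln_challengejs cookie on a shared requests session that every later page, certificate page, and artifact download reuses. A standalone sketch of that bootstrap, using requests and BeautifulSoup directly instead of the module's _get_page helper (the regex, script position, and cookie name come from the diff above; the base URL and the listing path are assumptions), could look like this:

    # Standalone sketch of the challenge-cookie bootstrap from this commit.
    # The regex, script position, and cookie name come from the diff;
    # the base URL and listing path below are illustrative assumptions.
    import re

    import requests
    from bs4 import BeautifulSoup

    ANSSI_BASE_URL = "https://cyber.gouv.fr"  # assumed value of constants.CC_ANSSI_BASE_URL

    session = requests.session()
    resp = session.get(ANSSI_BASE_URL)
    soup = BeautifulSoup(resp.content, "html.parser")

    # The challenge token sits in the second <script> of <head> as '"value":"<token>"'.
    script = soup.find("head").find_all("script")[1]
    match = re.search(r"\"value\":\"([a-zA-Z0-9_-]+)\"", script.string or "")
    if not match:
        raise ValueError("Challenge token not found")

    # Replay the token as the bln_challengejs cookie so later requests pass the check.
    session.cookies.set("bln_challengejs", match.group(1), domain="cyber.gouv.fr")

    # All subsequent listing pages and artifact downloads must reuse this session.
    listing = session.get(ANSSI_BASE_URL + "/produits-certifies?page=0")  # hypothetical path
    print(listing.status_code)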

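The remaining hunks thread the same session into _get_hash so that report, security-target, and certificate artifacts are downloaded with the challenge cookie as well. The repository's _get_hash is not part of this diff; a minimal stand-in with the same call shape (URL plus session, returning a digest whose .hex() is stored) might look like the sketch below, where the SHA-256 choice and the streaming download are assumptions:

    # Hypothetical stand-in for a session-aware download-and-hash helper.
    # Only the call shape (url, session=...) and the .hex() usage come from the diff;
    # the hash algorithm and streaming download are assumptions for illustration.
    import hashlib
    from typing import Optional

    import requests


    def get_artifact_hash(url: str, session: Optional[requests.Session] = None) -> bytes:
        sess = session or requests.session()
        digest = hashlib.sha256()
        with sess.get(url, stream=True) as resp:
            resp.raise_for_status()
            for chunk in resp.iter_content(chunk_size=8192):
                digest.update(chunk)
        return digest.digest()


    # Usage mirroring the diff, with `session` being the challenge-bearing session
    # from the bootstrap sketch above:
    # e["report_hash"] = get_artifact_hash(e["report_link"], session=session).hex()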