From a1e1d858df21bf5e234a26c232a6fc8590677e11 Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 00:12:25 +0530
Subject: [PATCH 1/7] add ssti tests
fix bug in run_test util function
---
src/offat/tester/test_generator.py | 60 ++++++++++++++++++++++--
src/offat/tester/test_runner.py | 30 +++++++++++-
src/offat/tester/tester_utils.py | 73 ++++++++++++++++++++----------
3 files changed, 135 insertions(+), 28 deletions(-)
diff --git a/src/offat/tester/test_generator.py b/src/offat/tester/test_generator.py
index 5069d5d..15ff97e 100644
--- a/src/offat/tester/test_generator.py
+++ b/src/offat/tester/test_generator.py
@@ -689,7 +689,6 @@ def bopla_fuzz_test(
tasks.append(
{
'test_name': 'BOPLA Test',
- # f'{base_url}{endpoint_path}',
'url': join_uri_path(
base_url, openapi_parser.api_base_path, endpoint_path
),
@@ -830,7 +829,7 @@ def os_command_injection_fuzz_params_test(
**kwargs: Arbitrary keyword arguments.
Returns:
- List: List of dictionaries containing tests for SQLi
+ List: List of dictionaries containing tests for OS command injection
Raises:
Any exceptions raised during the execution.
@@ -866,7 +865,7 @@ def xss_html_injection_fuzz_params_test(
**kwargs: Arbitrary keyword arguments.
Returns:
- List: List of dictionaries containing tests for SQLi
+ List: List of dictionaries containing tests for XSS
Raises:
Any exceptions raised during the execution.
@@ -899,3 +898,58 @@ def xss_html_injection_fuzz_params_test(
result_details=result_details,
payloads_data=payloads_data,
)
+
+ def ssti_fuzz_params_test(self, openapi_parser: SwaggerParser | OpenAPIv3Parser):
+ '''Performs SSTI fuzzing based on the provided OpenAPIParser instance.
+
+ Args:
+ openapi_parser (OpenAPIParser): An instance of the OpenAPIParser class containing the parsed OpenAPI specification.
+ *args: Variable-length positional arguments.
+ **kwargs: Arbitrary keyword arguments.
+
+ Returns:
+ List: List of dictionaries containing tests for SSTI
+
+ Raises:
+ Any exceptions raised during the execution.
+ '''
+ test_name = 'SSTI Test'
+
+ payloads_data = [
+ {'request_payload': r'${7777+99999}', 'response_match_regex': r'107776'},
+ {'request_payload': r"{{7*'7'}}", 'response_match_regex': r'49'},
+ {'request_payload': r"{{7*'7'}}", 'response_match_regex': r'7777777'},
+ {
+ 'request_payload': r"{{ '' }}",
+ 'response_match_regex': r'',
+ },
+ {
+ 'request_payload': r"{{ '' | safe }}",
+ 'response_match_regex': r'',
+ },
+ {
+ 'request_payload': r"{{'owasp offat'.toUpperCase()}}",
+ 'response_match_regex': r'OWASP OFFAT',
+ },
+ {
+ 'request_payload': r"{{'owasp offat' | upper }}",
+ 'response_match_regex': r'OWASP OFFAT',
+ },
+ {
+ 'request_payload': r"<%= system('cat /etc/passwd') %>",
+ 'response_match_regex': r'root:.*',
+ },
+ {'request_payload': r'*{7*7}', 'response_match_regex': r'49'},
+ ]
+
+ result_details = {
+ True: 'Parameters are not vulnerable to SSTI Attack', # passed
+ False: 'One or more parameters are vulnerable to SSTI Attack', # failed
+ }
+
+ return self.__generate_injection_fuzz_params_test(
+ openapi_parser=openapi_parser,
+ test_name=test_name,
+ result_details=result_details,
+ payloads_data=payloads_data,
+ )
diff --git a/src/offat/tester/test_runner.py b/src/offat/tester/test_runner.py
index 3dca968..c51ead0 100644
--- a/src/offat/tester/test_runner.py
+++ b/src/offat/tester/test_runner.py
@@ -15,6 +15,10 @@ class PayloadFor(Enum):
class TestRunner:
+ '''
+ class to run tests generated by `TestGenerator` class
+ '''
+
def __init__(
self,
rate_limit: float = 60,
@@ -76,11 +80,33 @@ def _generate_payloads(
return {}
- async def send_request(self, test_task):
+ async def send_request(self, test_task: dict):
+ """
+ Sends an HTTP request based on the provided test_task.
+
+ Args:
+ test_task (dict): A dictionary containing the details of the test task.
+ It should include the following keys:
+ - 'url': The URL to send the request to.
+ - 'method': The HTTP method to use for the request.
+ - 'args': Additional positional arguments to pass to the request.
+ - 'kwargs': Additional keyword arguments to pass to the request.
+ - 'body_params': The body parameters for the request.
+ - 'query_params': The query parameters for the request.
+
+ Returns:
+ dict: A dictionary containing the test result. It includes the following keys:
+ - 'request_headers': The headers sent in the request.
+ - 'response_headers': The headers received in the response.
+ - 'response_body': The body of the response.
+ - 'response_status_code': The status code of the response.
+ - 'redirection': The redirection URL, if any.
+ - 'error': True if an error occurred during the request, False otherwise.
+ """
url = test_task.get('url')
http_method = test_task.get('method')
args = test_task.get('args')
- kwargs = test_task.get('kwargs')
+ kwargs = test_task.get('kwargs', {})
body_params = test_task.get('body_params')
query_params = test_task.get('query_params')
diff --git a/src/offat/tester/tester_utils.py b/src/offat/tester/tester_utils.py
index d8dd023..99ffaf4 100644
--- a/src/offat/tester/tester_utils.py
+++ b/src/offat/tester/tester_utils.py
@@ -77,6 +77,7 @@ def run_test(
try:
if skip_test_run:
+ logger.warning('Skipping test run for: %s', description)
test_results = tests
else:
test_results = run(test_runner.run_tests(tests, description))
@@ -96,9 +97,9 @@ def run_test(
if post_run_matcher_test:
test_results = PostRunTests.matcher(test_results)
-
- # update test result for status based code filter
- test_results = PostRunTests.filter_status_code_based_results(test_results)
+ else:
+ # update test result for status based code filter
+ test_results = PostRunTests.filter_status_code_based_results(test_results)
# update tests result success/failure details
test_results = PostRunTests.update_result_details(test_results)
@@ -143,7 +144,7 @@ def generate_and_run_tests(
results: list = []
# test for unsupported http methods
- test_name = 'Checking for Unsupported HTTP Methods/Verbs:'
+ test_name = 'Checking for Unsupported HTTP Methods/Verbs'
logger.info(test_name)
unsupported_http_endpoint_tests = test_generator.check_unsupported_http_methods(
api_parser
@@ -157,7 +158,7 @@ def generate_and_run_tests(
)
# sqli fuzz test
- test_name = 'Checking for SQLi vulnerability:'
+ test_name = 'Checking for SQLi vulnerability'
logger.info(test_name)
sqli_fuzz_tests = test_generator.sqli_fuzz_params_test(api_parser)
results += run_test(
@@ -167,7 +168,7 @@ def generate_and_run_tests(
description=f'(FUZZED) {test_name}',
)
- test_name = 'Checking for SQLi vulnerability in URI Path:'
+ test_name = 'Checking for SQLi vulnerability in URI Path'
logger.info(test_name)
sqli_fuzz_tests = test_generator.sqli_in_uri_path_fuzz_test(api_parser)
results += run_test(
@@ -178,7 +179,7 @@ def generate_and_run_tests(
)
# OS Command Injection Fuzz Test
- test_name = 'Checking for OS Command Injection Vulnerability with fuzzed params and checking response body:' # noqa: E501
+ test_name = 'Checking for OS Command Injection Vulnerability with fuzzed params and checking response body' # noqa: E501
logger.info(test_name)
os_command_injection_tests = test_generator.os_command_injection_fuzz_params_test(
api_parser
@@ -188,11 +189,11 @@ def generate_and_run_tests(
tests=os_command_injection_tests,
regex_pattern=regex_pattern,
post_run_matcher_test=True,
- description='(FUZZED) Checking for OS Command Injection:',
+ description='(FUZZED) Checking for OS Command Injection',
)
# XSS/HTML Injection Fuzz Test
- test_name = 'Checking for XSS/HTML Injection Vulnerability with fuzzed params and checking response body:' # noqa: E501
+ test_name = 'Checking for XSS/HTML Injection Vulnerability with fuzzed params and checking response body' # noqa: E501
logger.info(test_name)
os_command_injection_tests = test_generator.xss_html_injection_fuzz_params_test(
api_parser
@@ -202,11 +203,11 @@ def generate_and_run_tests(
tests=os_command_injection_tests,
regex_pattern=regex_pattern,
post_run_matcher_test=True,
- description='(FUZZED) Checking for XSS/HTML Injection:',
+ description='(FUZZED) Checking for XSS/HTML Injection',
)
# BOLA path tests with fuzzed data
- test_name = 'Checking for BOLA in PATH using fuzzed params:'
+ test_name = 'Checking for BOLA in PATH using fuzzed params'
logger.info(test_name)
bola_fuzzed_path_tests = test_generator.bola_fuzz_path_test(
api_parser, success_codes=[200, 201, 301]
@@ -220,7 +221,7 @@ def generate_and_run_tests(
# BOLA path test with fuzzed data + trailing slash
test_name = (
- 'Checking for BOLA in PATH with trailing slash and id using fuzzed params:'
+ 'Checking for BOLA in PATH with trailing slash and id using fuzzed params'
)
logger.info(test_name)
bola_trailing_slash_path_tests = test_generator.bola_fuzz_trailing_slash_path_test(
@@ -230,7 +231,7 @@ def generate_and_run_tests(
test_runner=test_runner,
tests=bola_trailing_slash_path_tests,
regex_pattern=regex_pattern,
- description='(FUZZED) Checking for BOLA in PATH with trailing slash:',
+ description='(FUZZED) Checking for BOLA in PATH with trailing slash',
)
# Mass Assignment / BOPLA
@@ -243,7 +244,19 @@ def generate_and_run_tests(
test_runner=test_runner,
tests=bopla_tests,
regex_pattern=regex_pattern,
- description='(FUZZED) Checking for Mass Assignment Vulnerability:',
+ description='(FUZZED) Checking for BOPLA/Mass Assignment Vulnerability',
+ )
+
+ # SSTI Vulnerability
+ test_name = 'Checking for SSTI vulnerability with fuzzed params and checking response body' # noqa: E501
+ logger.info(test_name)
+ ssti_tests = test_generator.ssti_fuzz_params_test(api_parser)
+ results += run_test(
+ test_runner=test_runner,
+ tests=ssti_tests,
+ regex_pattern=regex_pattern,
+ description='(FUZZED) Checking for SSTI Vulnerability',
+ post_run_matcher_test=True,
)
# Tests with User provided Data
@@ -251,9 +264,7 @@ def generate_and_run_tests(
logger.info('[bold] Testing with user provided data [/bold]')
# # BOLA path tests with fuzzed + user provided data
- test_name = (
- 'Checking for BOLA in PATH using fuzzed and user provided params:',
- )
+ test_name = 'Checking for BOLA in PATH using fuzzed and user provided params'
logger.info(test_name)
bola_fuzzed_user_data_tests = test_generator.test_with_user_data(
test_data_config,
@@ -265,7 +276,7 @@ def generate_and_run_tests(
test_runner=test_runner,
tests=bola_fuzzed_user_data_tests,
regex_pattern=regex_pattern,
- description='(USER + FUZZED) Checking for BOLA in PATH:',
+ description='(USER + FUZZED) Checking for BOLA in PATH',
)
# BOLA path test with fuzzed + user data + trailing slash
@@ -281,11 +292,11 @@ def generate_and_run_tests(
test_runner=test_runner,
tests=bola_trailing_slash_path_user_data_tests,
regex_pattern=regex_pattern,
- description='(USER + FUZZED) Checking for BOLA in PATH with trailing slash:',
+ description='(USER + FUZZED) Checking for BOLA in PATH with trailing slash',
)
# OS Command Injection Fuzz Test
- test_name = 'Checking for OS Command Injection Vulnerability with fuzzed & user params and checking response body:' # noqa: E501
+ test_name = 'Checking for OS Command Injection Vulnerability with fuzzed & user params and checking response body' # noqa: E501
logger.info(test_name)
os_command_injection_with_user_data_tests = test_generator.test_with_user_data(
test_data_config,
@@ -301,7 +312,7 @@ def generate_and_run_tests(
)
# XSS/HTML Injection Fuzz Test
- test_name = 'Checking for XSS/HTML Injection Vulnerability with fuzzed & user params and checking response body:' # noqa: E501
+ test_name = 'Checking for XSS/HTML Injection Vulnerability with fuzzed & user params and checking response body' # noqa: E501
logger.info(test_name)
os_command_injection_with_user_data_tests = test_generator.test_with_user_data(
test_data_config,
@@ -313,11 +324,27 @@ def generate_and_run_tests(
tests=os_command_injection_with_user_data_tests,
regex_pattern=regex_pattern,
post_run_matcher_test=True,
- description='(USER + FUZZED) Checking for XSS/HTML Injection:',
+ description='(USER + FUZZED) Checking for XSS/HTML Injection Vulnerability',
+ )
+
+ # SSTI Vulnerability
+ test_name = 'Checking for SSTI vulnerability with fuzzed params & user data and checking response body' # noqa: E501
+ logger.info(test_name)
+ ssti_with_user_data_tests = test_generator.test_with_user_data(
+ test_data_config,
+ test_generator.ssti_fuzz_params_test,
+ openapi_parser=api_parser,
+ )
+ results += run_test(
+ test_runner=test_runner,
+ tests=ssti_with_user_data_tests,
+ regex_pattern=regex_pattern,
+ description='(USER + FUZZED) Checking for SSTI Vulnerability',
+ post_run_matcher_test=True,
)
# Broken Access Control Test
- test_name = 'Checking for Broken Access Control:'
+ test_name = 'Checking for Broken Access Control'
logger.info(test_name)
bac_results = PostRunTests.run_broken_access_control_tests(
results, test_data_config
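Note on how the SSTI payloads above are evaluated: each `request_payload` is paired with a `response_match_regex`, and because the SSTI tests run with `post_run_matcher_test=True`, an endpoint is flagged when that regex matches the response body (i.e. the template engine evaluated the injected expression). A minimal sketch of that check in isolation, assuming a plain `re.search` against the body rather than the actual `PostRunTests.matcher` implementation:

```python
import re

# One of the payloads added in this patch: if the template engine evaluates
# the expression, the arithmetic result shows up in the rendered response.
payload = {'request_payload': r'${7777+99999}', 'response_match_regex': r'107776'}

# Hypothetical response body returned by a vulnerable endpoint.
response_body = 'Hello, 107776!'

# A regex hit on the response body means the expression was rendered,
# so the test would be marked as failed (vulnerable to SSTI).
vulnerable = bool(re.search(payload['response_match_regex'], response_body))
print(vulnerable)  # True
```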
From 4b0a961d85ce94820808a1820de7147de73bad89 Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 16:33:24 +0530
Subject: [PATCH 2/7] remove redundant data stored in results
---
src/offat/report/templates/table.py | 35 ++++++++++++++++++-----------
src/offat/tester/tester_utils.py | 29 ++++++++++++++++++++++++
2 files changed, 51 insertions(+), 13 deletions(-)
diff --git a/src/offat/report/templates/table.py b/src/offat/report/templates/table.py
index 335c8ef..52c0325 100644
--- a/src/offat/report/templates/table.py
+++ b/src/offat/report/templates/table.py
@@ -3,7 +3,10 @@
class TestResultTable:
- def __init__(self, table_width_percentage: float = 98, ) -> None:
+ def __init__(
+ self,
+ table_width_percentage: float = 98,
+ ) -> None:
self.console = console
self.table_width_percentage = table_width_percentage
@@ -19,7 +22,10 @@ def extract_result_table_cols(self, results: list[dict]) -> list[str]:
return sorted({key for dictionary in results for key in dictionary.keys()})
def generate_result_cols(self, results_list: list[dict]) -> list[Column]:
- return [Column(header=col_header, overflow='fold') for col_header in self.extract_result_table_cols(results_list)]
+ return [
+ Column(header=col_header, overflow='fold')
+ for col_header in self.extract_result_table_cols(results_list)
+ ]
def generate_result_table(self, results: list, filter_passed_results: bool = True):
results = self._sanitize_results(results, filter_passed_results)
@@ -29,23 +35,28 @@ def generate_result_table(self, results: list, filter_passed_results: bool = Tru
for result in results:
table_row = []
for col in cols:
- table_row.append(
- str(result.get(col.header, '[red]:bug: - [/red]')))
+ table_row.append(str(result.get(col.header, '[red]:bug: - [/red]')))
table.add_row(*table_row)
return table
- def _sanitize_results(self, results: list, filter_passed_results: bool = True, is_leaking_data: bool = False):
+ def _sanitize_results(
+ self,
+ results: list,
+ filter_passed_results: bool = True,
+ is_leaking_data: bool = False,
+ ):
if filter_passed_results:
- results = list(filter(lambda x: not x.get(
- 'result') or x.get('data_leak'), results))
+ results = list(
+ filter(lambda x: not x.get('result') or x.get('data_leak'), results)
+ )
# remove keys based on conditions or update their values
for result in results:
if result['result']:
- result['result'] = u"[bold green]Passed \u2713[/bold green]"
+ result['result'] = '[bold green]Passed \u2713[/bold green]'
else:
- result['result'] = u"[bold red]Failed \u00d7[/bold red]"
+ result['result'] = '[bold red]Failed \u00d7[/bold red]'
if not is_leaking_data:
del result['response_headers']
@@ -65,16 +76,14 @@ def _sanitize_results(self, results: list, filter_passed_results: bool = True, i
del result['response_match_regex']
if result.get('data_leak'):
- result['data_leak'] = u"[bold red]Leak Found \u00d7[/bold red]"
+ result['data_leak'] = '[bold red]Leak Found \u00d7[/bold red]'
else:
- result['data_leak'] = u"[bold green]No Leak \u2713[/bold green]"
+ result['data_leak'] = '[bold green]No Leak \u2713[/bold green]'
if not isinstance(result.get('malicious_payload'), str):
del result['malicious_payload']
del result['url']
- del result['args']
- del result['kwargs']
del result['test_name']
del result['response_filter']
del result['body_params']
diff --git a/src/offat/tester/tester_utils.py b/src/offat/tester/tester_utils.py
index 99ffaf4..d5be40f 100644
--- a/src/offat/tester/tester_utils.py
+++ b/src/offat/tester/tester_utils.py
@@ -122,6 +122,7 @@ def generate_and_run_tests(
test_data_config: dict | None = None,
ssl: bool = False,
capture_failed: bool = False,
+ remove_unused_data: bool = True,
):
'''
Generates and runs tests for provied OAS/Swagger file.
@@ -357,6 +358,34 @@ def generate_and_run_tests(
description=test_name,
)
+ if remove_unused_data:
+ for result in results:
+ result.pop('kwargs', None)
+ result.pop('args', None)
+ body_params = result.get('body_params', [{}])
+ query_params = result.get('query_params', [{}])
+ path_params = result.get('path_params', [{}])
+ malicious_payload = result.get('malicious_payload', '')
+
+ if isinstance(malicious_payload, list):
+ result['malicious_payload'] = [
+ {'name': param.get('name'), 'value': param.get('value')}
+ for param in malicious_payload
+ ]
+
+ result['body_params'] = [
+ {'name': param.get('name'), 'value': param.get('value')}
+ for param in body_params
+ ]
+ result['query_params'] = [
+ {'name': param.get('name'), 'value': param.get('value')}
+ for param in query_params
+ ]
+ result['path_params'] = [
+ {'name': param.get('name'), 'value': param.get('value')}
+ for param in path_params
+ ]
+
# save file to output if output flag is present
if output_file_format != 'table':
ReportGenerator.generate_report(
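To illustrate what the `remove_unused_data` block above does to a single test result, here is a small sketch; the result entry and its field values are hypothetical, only the trimming logic mirrors the patch:

```python
# Illustrative result entry; values are hypothetical.
result = {
    'url': 'http://testserver/api/v1/users',
    'args': (),
    'kwargs': {'headers': {'Content-Type': 'application/json'}},
    'body_params': [{'name': 'email', 'value': 'a@b.c', 'in': 'body', 'required': True}],
    'query_params': [],
    'path_params': [],
    'malicious_payload': [{'name': 'email', 'value': "{{7*'7'}}", 'in': 'body'}],
}

# Drop request internals and keep only name/value pairs, as in the patch above.
result.pop('kwargs', None)
result.pop('args', None)
for key in ('body_params', 'query_params', 'path_params', 'malicious_payload'):
    value = result.get(key, [])
    if isinstance(value, list):
        result[key] = [{'name': p.get('name'), 'value': p.get('value')} for p in value]

print(result['body_params'])        # [{'name': 'email', 'value': 'a@b.c'}]
print(result['malicious_payload'])  # [{'name': 'email', 'value': "{{7*'7'}}"}]
print('kwargs' in result)           # False
```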
From 0aac9e5facf7002741f025448c2d9b06147849cf Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 16:59:33 +0530
Subject: [PATCH 3/7] create utils function, rename generator and runner
modules
---
.../{test_generator.py => generator.py} | 0
.../tester/{test_runner.py => runner.py} | 0
src/offat/tester/tester_utils.py | 76 ++++++++++++-------
3 files changed, 48 insertions(+), 28 deletions(-)
rename src/offat/tester/{test_generator.py => generator.py} (100%)
rename src/offat/tester/{test_runner.py => runner.py} (100%)
diff --git a/src/offat/tester/test_generator.py b/src/offat/tester/generator.py
similarity index 100%
rename from src/offat/tester/test_generator.py
rename to src/offat/tester/generator.py
diff --git a/src/offat/tester/test_runner.py b/src/offat/tester/runner.py
similarity index 100%
rename from src/offat/tester/test_runner.py
rename to src/offat/tester/runner.py
diff --git a/src/offat/tester/tester_utils.py b/src/offat/tester/tester_utils.py
index d5be40f..307bbf4 100644
--- a/src/offat/tester/tester_utils.py
+++ b/src/offat/tester/tester_utils.py
@@ -9,8 +9,8 @@
from re import search as regex_search
from .post_test_processor import PostRunTests
-from .test_generator import TestGenerator
-from .test_runner import TestRunner
+from .generator import TestGenerator
+from .runner import TestRunner
from ..report.generator import ReportGenerator
from ..report.summary import ResultSummarizer
from ..logger import logger, console
@@ -110,6 +110,26 @@ def run_test(
return test_results
+def reduce_data_list(data_list: list[dict] | str) -> list[dict] | str:
+ """
+ Reduces a list of dictionaries to only include 'name' and 'value' keys.
+
+ Args:
+ data_list (list[dict] | str): The input data list to be reduced.
+
+ Returns:
+ list[dict] | str: The reduced data list with only 'name' and 'value' keys.
+
+ """
+ if isinstance(data_list, list):
+ return [
+ {'name': param.get('name'), 'value': param.get('value')}
+ for param in data_list
+ ]
+
+ return data_list
+
+
# Note: redirects are allowed by default making it easier for pentesters/researchers
def generate_and_run_tests(
api_parser: SwaggerParser | OpenAPIv3Parser,
@@ -124,9 +144,25 @@ def generate_and_run_tests(
capture_failed: bool = False,
remove_unused_data: bool = True,
):
- '''
- Generates and runs tests for provied OAS/Swagger file.
- '''
+ """
+ Generates and runs tests for the provided OAS/Swagger file.
+
+ Args:
+ api_parser: An instance of SwaggerParser or OpenAPIv3Parser representing the parsed API specification.
+ regex_pattern: A string representing the regex pattern to match against the response body (optional).
+ output_file: A string representing the path to the output file (optional).
+ output_file_format: A string representing the format of the output file (optional).
+ rate_limit: An integer representing the rate limit for the tests (optional).
+ req_headers: A dictionary representing the request headers (optional).
+ proxies: A list of strings representing the proxies to be used (optional).
+ test_data_config: A dictionary representing the configuration for user-provided test data (optional).
+ ssl: A boolean indicating whether to use SSL for the requests (default: False).
+ capture_failed: A boolean indicating whether to capture failed tests in the report (default: False).
+ remove_unused_data: A boolean indicating whether to remove unused data (default: True).
+
+ Returns:
+ A list of test results.
+ """
if not is_host_up(openapi_parser=api_parser):
logger.error(
'Stopping tests due to unavailibility of host: %s', api_parser.host
@@ -362,29 +398,13 @@ def generate_and_run_tests(
for result in results:
result.pop('kwargs', None)
result.pop('args', None)
- body_params = result.get('body_params', [{}])
- query_params = result.get('query_params', [{}])
- path_params = result.get('path_params', [{}])
- malicious_payload = result.get('malicious_payload', '')
-
- if isinstance(malicious_payload, list):
- result['malicious_payload'] = [
- {'name': param.get('name'), 'value': param.get('value')}
- for param in malicious_payload
- ]
-
- result['body_params'] = [
- {'name': param.get('name'), 'value': param.get('value')}
- for param in body_params
- ]
- result['query_params'] = [
- {'name': param.get('name'), 'value': param.get('value')}
- for param in query_params
- ]
- result['path_params'] = [
- {'name': param.get('name'), 'value': param.get('value')}
- for param in path_params
- ]
+
+ result['body_params'] = reduce_data_list(result.get('body_params', [{}]))
+ result['query_params'] = reduce_data_list(result.get('query_params', [{}]))
+ result['path_params'] = reduce_data_list(result.get('path_params', [{}]))
+ result['malicious_payload'] = reduce_data_list(
+ result.get('malicious_payload', [])
+ )
# save file to output if output flag is present
if output_file_format != 'table':
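A usage sketch of the `reduce_data_list` helper introduced above; the extra keys on the example parameters are illustrative, and string payloads pass through unchanged:

```python
def reduce_data_list(data_list: list[dict] | str) -> list[dict] | str:
    # Same behavior as the helper added in this patch: keep only the
    # 'name' and 'value' keys of each parameter dictionary.
    if isinstance(data_list, list):
        return [
            {'name': param.get('name'), 'value': param.get('value')}
            for param in data_list
        ]
    return data_list


params = [
    {'name': 'id', 'value': 67, 'in': 'query', 'required': True},
    {'name': 'email', 'value': 'user@example.com', 'in': 'body'},
]
print(reduce_data_list(params))
# [{'name': 'id', 'value': 67}, {'name': 'email', 'value': 'user@example.com'}]
print(reduce_data_list('<svg onload=alert(1)>'))  # returned as-is: '<svg onload=alert(1)>'
```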
From 789757695085d562955edb6175825187ee126e1f Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 18:57:18 +0530
Subject: [PATCH 4/7] result noise optimizations
---
src/offat/config_data_handler.py | 61 +++++++++++++-------------------
src/offat/tester/generator.py | 36 ++++++++++---------
src/offat/tester/tester_utils.py | 2 +-
src/offat/utils.py | 51 ++++++++++++++++++++++++++
4 files changed, 96 insertions(+), 54 deletions(-)
diff --git a/src/offat/config_data_handler.py b/src/offat/config_data_handler.py
index 8fec71a..127d891 100644
--- a/src/offat/config_data_handler.py
+++ b/src/offat/config_data_handler.py
@@ -1,39 +1,23 @@
+"""
+Module contains the functions to validate the test
+configuration data and populate user data for tests.
+"""
from copy import deepcopy
from .logger import logger
+from .utils import update_values
-def overwrite_user_params(list1: list[dict], list2: list[dict]) -> list[dict]:
+def validate_config_file_data(test_config_data: dict):
"""
- Update values in list1 based on the corresponding "name" values in list2.
+ Validates the provided test configuration data.
Args:
- list1 (list of dict): The list of dictionaries to be updated.
- list2 (list of dict): The list of dictionaries containing values to update from.
+ test_config_data (dict): The test configuration data to be validated.
Returns:
- list of dict: The updated list1 with values from list2.
-
- Example:
- ```python
- list1 = [{'name': 'id', 'value': 67}, {'name': 'email', 'value': 'old@example.com'}]
- list2 = [{'name': 'id', 'value': 10}, {'name': 'email', 'value': 'new@example.com'}]
- updated_list = update_values(list1, list2)
- print(updated_list)
- # Output: [{'name': 'id', 'value': 10}, {'name': 'email', 'value': 'new@example.com'}]
- ```
- """
- # Create a dictionary for faster lookup
- lookup_dict = {item['name']: item['value'] for item in list2}
-
- # Update values in list1 using index lookup
- for item in list1:
- if item['name'] in lookup_dict:
- item['value'] = lookup_dict[item['name']]
-
- return list1
+ bool or dict: Returns False if the data is invalid, otherwise returns the validated test configuration data.
-
-def validate_config_file_data(test_config_data: dict):
+ """
if not isinstance(test_config_data, dict):
logger.warning('Invalid data format')
return False
@@ -42,9 +26,7 @@ def validate_config_file_data(test_config_data: dict):
logger.warning('Error Occurred While reading file: %s', test_config_data)
return False
- if not test_config_data.get(
- 'actors',
- ):
+ if not test_config_data.get('actors'):
logger.warning('actors are required')
return False
@@ -57,6 +39,17 @@ def validate_config_file_data(test_config_data: dict):
def populate_user_data(actor_data: dict, actor_name: str, tests: list[dict]):
+ """
+ Populates user data for tests.
+
+ Args:
+ actor_data (dict): The data of the actor.
+ actor_name (str): The name of the actor.
+ tests (list[dict]): The list of tests.
+
+ Returns:
+ list[dict]: The updated list of tests.
+ """
tests = deepcopy(tests)
headers = actor_data.get('request_headers', [])
body_params = actor_data.get('body', [])
@@ -69,15 +62,11 @@ def populate_user_data(actor_data: dict, actor_name: str, tests: list[dict]):
request_headers[header.get('name')] = header.get('value')
for test in tests:
- test['body_params'] = overwrite_user_params(
- deepcopy(test['body_params']), body_params
- )
- test['query_params'] = overwrite_user_params(
+ test['body_params'] = update_values(deepcopy(test['body_params']), body_params)
+ test['query_params'] = update_values(
deepcopy(test['query_params']), query_params
)
- test['path_params'] += overwrite_user_params(
- deepcopy(test['path_params']), path_params
- )
+ test['path_params'] += update_values(deepcopy(test['path_params']), path_params)
# for post test processing tests such as broken authentication
test['test_actor_name'] = actor_name
if test.get('kwargs', {}).get('headers', {}).items():
diff --git a/src/offat/tester/generator.py b/src/offat/tester/generator.py
index 15ff97e..931d948 100644
--- a/src/offat/tester/generator.py
+++ b/src/offat/tester/generator.py
@@ -4,7 +4,7 @@
from .fuzzer import generate_random_int
from ..config_data_handler import populate_user_data
from ..parsers import SwaggerParser, OpenAPIv3Parser
-from ..utils import join_uri_path
+from ..utils import join_uri_path, get_unique_params
class TestGenerator:
@@ -97,7 +97,7 @@ def check_unsupported_http_methods(
query_params = endpoint_dict.get('query_params', [])
url = join_uri_path(openapi_parser.base_url, endpoint)
- http_methods: set = {'get', 'post', 'put', 'delete', 'options'}
+ http_methods: set = {'get', 'post', 'put', 'patch', 'delete', 'options'}
restricted_methods = http_methods - set(methods_allowed)
for restricted_method in restricted_methods:
@@ -182,14 +182,16 @@ def __fuzz_request_params(
filter(lambda x: x.get('in') == 'path', request_params)
)
- # handle path params from path_params
- # and replace path params by value in
- # endpoint path
+ # get endpoint path
endpoint_path: str = path_obj.get('path')
+
+ # get path params and fill them
path_params = path_obj.get('path_params', [])
- path_params += path_params_in_body
path_params = fill_params(path_params, openapi_parser.is_v3)
+ # get unique path params
+ path_params = get_unique_params(path_params, path_params_in_body)
+
for path_param in path_params:
path_param_name = path_param.get('name')
path_param_value = path_param.get('value')
@@ -455,17 +457,14 @@ def bola_fuzz_path_test(
filter(lambda x: x.get('in') == 'body', request_params)
)
- # handle path params from path_params
- # and replace path params by value in
- # endpoint path
endpoint_path: str = path_obj.get('path')
path_params = path_obj.get('path_params', [])
path_params_in_body = list(
filter(lambda x: x.get('in') == 'path', request_params)
)
- path_params += path_params_in_body
path_params = fill_params(path_params, openapi_parser.is_v3)
+ path_params = get_unique_params(path_params_in_body, path_params)
for path_param in path_params:
path_param_name = path_param.get('name')
@@ -547,14 +546,16 @@ def bola_fuzz_trailing_slash_path_test(
filter(lambda x: x.get('in') == 'path', request_params)
)
- # handle path params from path_params
- # and replace path params by value in
- # endpoint path
+ # get endpoint path
endpoint_path: str = path_obj.get('path')
+
+ # get path params and fill them
path_params = path_obj.get('path_params', [])
- path_params += path_params_in_body
path_params = fill_params(path_params, openapi_parser.is_v3)
+ # get unique path params
+ path_params = get_unique_params(path_params, path_params_in_body)
+
for path_param in path_params:
path_param_name = path_param.get('name')
path_param_value = path_param.get('value')
@@ -564,10 +565,11 @@ def bola_fuzz_trailing_slash_path_test(
# generate URL for BOLA attack
url = join_uri_path(base_url, openapi_parser.api_base_path, endpoint_path)
+ malicious_payload = generate_random_int()
if url.endswith('/'):
- url = f'{url}{generate_random_int()}'
+ url = f'{url}{malicious_payload}'
else:
- url = f'{url}/{generate_random_int()}'
+ url = f'{url}/{malicious_payload}'
tasks.append(
{
@@ -580,7 +582,7 @@ def bola_fuzz_trailing_slash_path_test(
'body_params': request_body_params,
'query_params': request_query_params,
'path_params': path_params,
- 'malicious_payload': [],
+ 'malicious_payload': malicious_payload,
'args': args,
'kwargs': kwargs,
'result_details': {
diff --git a/src/offat/tester/tester_utils.py b/src/offat/tester/tester_utils.py
index 307bbf4..1ec6c91 100644
--- a/src/offat/tester/tester_utils.py
+++ b/src/offat/tester/tester_utils.py
@@ -191,7 +191,7 @@ def generate_and_run_tests(
test_runner=test_runner,
tests=unsupported_http_endpoint_tests,
regex_pattern=regex_pattern,
- description='(FUZZED) ' + test_name,
+ description=f'(FUZZED) {test_name}',
)
# sqli fuzz test
diff --git a/src/offat/utils.py b/src/offat/utils.py
index 3b44d83..56022d5 100644
--- a/src/offat/utils.py
+++ b/src/offat/utils.py
@@ -268,3 +268,54 @@ def join_uri_path(*args: str, remove_prefix: str = '/') -> str:
url = urljoin(url, uri.removeprefix(remove_prefix))
return url
+
+
+def update_values(list1: list[dict], list2: list[dict]) -> list[dict]:
+ """
+ Update values in list1 based on the corresponding "name" values in list2.
+
+ Args:
+ list1 (list of dict): The list of dictionaries to be updated.
+ list2 (list of dict): The list of dictionaries containing values to update from.
+
+ Returns:
+ list of dict: The updated list1 with values from list2.
+
+ Example:
+ ```python
+ list1 = [{'name': 'id', 'value': 67}, {'name': 'email', 'value': 'old@example.com'}]
+ list2 = [{'name': 'id', 'value': 10}, {'name': 'email', 'value': 'new@example.com'}]
+ updated_list = update_values(list1, list2)
+ print(updated_list)
+ # Output: [{'name': 'id', 'value': 10}, {'name': 'email', 'value': 'new@example.com'}]
+ ```
+ """
+ # Create a dictionary for faster lookup
+ lookup_dict = {item['name']: item['value'] for item in list2}
+
+ # Update values in list1 using index lookup
+ for item in list1:
+ if item['name'] in lookup_dict:
+ item['value'] = lookup_dict[item['name']]
+
+ return list1
+
+
+def get_unique_params(list1: list[dict], list2: list[dict]) -> list[dict]:
+ '''Returns unique path params from list1 and list2
+
+ Args:
+ list1 (list of dict): The first list of parameter dictionaries.
+ list2 (list of dict): The second list of parameter dictionaries.
+
+ Returns:
+ list of dict: Parameters from both lists, deduplicated by their 'name' key.
+ '''
+ unique_path_params_names = []
+ unique_path_params = []
+ for path_param in list1 + list2:
+ if path_param.get('name') not in unique_path_params_names:
+ unique_path_params.append(path_param)
+ unique_path_params_names.append(path_param.get('name'))
+
+ return unique_path_params
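A standalone sketch of the `get_unique_params` helper added above, showing why it replaces the earlier `path_params += path_params_in_body` pattern: a parameter that appears in both lists is kept only once, first occurrence wins:

```python
def get_unique_params(list1: list[dict], list2: list[dict]) -> list[dict]:
    # Same logic as the helper added above: deduplicate params by 'name',
    # keeping the first occurrence encountered.
    unique_path_params_names = []
    unique_path_params = []
    for path_param in list1 + list2:
        if path_param.get('name') not in unique_path_params_names:
            unique_path_params.append(path_param)
            unique_path_params_names.append(path_param.get('name'))
    return unique_path_params


# 'id' is present in both lists; concatenating them would fuzz it twice,
# deduplication keeps a single entry per parameter name.
path_params = [{'name': 'id', 'value': 74}]
path_params_in_body = [{'name': 'id', 'in': 'path'}, {'name': 'orgId', 'in': 'path'}]
print(get_unique_params(path_params, path_params_in_body))
# [{'name': 'id', 'value': 74}, {'name': 'orgId', 'in': 'path'}]
```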
From e92a0d0ab6b9af31e50be9b12a8fa849e3460d94 Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 19:04:59 +0530
Subject: [PATCH 5/7] fix bopla test
increase readability
---
src/offat/tester/generator.py | 2 +-
src/offat/utils.py | 13 +++++++------
2 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/src/offat/tester/generator.py b/src/offat/tester/generator.py
index 931d948..c7be901 100644
--- a/src/offat/tester/generator.py
+++ b/src/offat/tester/generator.py
@@ -671,8 +671,8 @@ def bopla_fuzz_test(
# endpoint path
endpoint_path: str = path_obj.get('path')
path_params = path_obj.get('path_params', [])
- path_params += path_params_in_body
path_params = fill_params(path_params, openapi_parser.is_v3)
+ path_params = get_unique_params(path_params_in_body, path_params)
for path_param in path_params:
path_param_name = path_param.get('name')
diff --git a/src/offat/utils.py b/src/offat/utils.py
index 56022d5..443da5d 100644
--- a/src/offat/utils.py
+++ b/src/offat/utils.py
@@ -311,11 +311,12 @@ def get_unique_params(list1: list[dict], list2: list[dict]) -> list[dict]:
Returns:
    list of dict: Parameters from both lists, deduplicated by their 'name' key.
'''
- unique_path_params_names = []
- unique_path_params = []
+ unique_params_names = []
+ unique_params = []
for path_param in list1 + list2:
- if path_param.get('name') not in unique_path_params_names:
- unique_path_params.append(path_param)
- unique_path_params_names.append(path_param.get('name'))
+ param_name = path_param.get('name')
+ if param_name not in unique_params_names:
+ unique_params.append(path_param)
+ unique_params_names.append(param_name)
- return unique_path_params
+ return unique_params
From 957d9d6fd150d1046d9c9ba70c8c4d27734be3af Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 19:53:24 +0530
Subject: [PATCH 6/7] update README and add note after results table
---
src/README.md | 36 ++++++++++++++++++++++++++++++++
src/offat/tester/tester_utils.py | 5 +++++
2 files changed, 41 insertions(+)
diff --git a/src/README.md b/src/README.md
index e150b8c..83ee4c4 100644
--- a/src/README.md
+++ b/src/README.md
@@ -16,6 +16,7 @@ Automatically Tests for vulnerabilities after generating tests from openapi spec
- [x] Broken Access Control
- [x] Basic Command Injection
- [x] Basic XSS/HTML Injection test
+- [x] Basic SSTI test
- [ ] Broken Authentication
## Features
@@ -28,6 +29,7 @@ Automatically Tests for vulnerabilities after generating tests from openapi spec
- Proxy Support
- Secure Dockerized Project for Easy Usage
- Open Source Tool with MIT License
+- GitHub Action
## Demo
@@ -35,6 +37,40 @@ Automatically Tests for vulnerabilities after generating tests from openapi spec
> Note: The columns for 'data_leak' and 'result' in the table represent independent aspects. It's possible for there to be a data leak in the endpoint, yet the result for that endpoint may still be marked as 'Success'. This is because the 'result' column doesn't necessarily reflect the overall test result; it may indicate success even in the presence of a data leak.
+## GitHub Action
+
+- Create a GitHub Actions secret `url` for your repo
+- Set up a GitHub Actions workflow in your repo at `.github/workflows/offat.yml`
+
+```yml
+name: OWASP OFFAT Sample Workflow
+
+on:
+ push:
+ branches:
+ - dev
+ - main
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: "download swagger/OAS file"
+ run: curl ${url} -o /tmp/swagger.json
+ env:
+ url: ${{ secrets.url }}
+
+ - name: "OWASP OFFAT CICD Scanner"
+ uses: OWASP/OFFAT@main # OWASP/OFFAT@v0.17.3
+ with:
+ file: /tmp/swagger.json # or ${{ secrets.url }}
+ rate_limit: 120
+ artifact_retention_days: 1
+```
+
+> Prefer locking the action to a specific version such as `OWASP/OFFAT@v0.17.3` instead of using `OWASP/OFFAT@main`, and bump the OFFAT action version after testing.
+
## PyPi Downloads
| Period | Count |
diff --git a/src/offat/tester/tester_utils.py b/src/offat/tester/tester_utils.py
index 1ec6c91..d7fbde4 100644
--- a/src/offat/tester/tester_utils.py
+++ b/src/offat/tester/tester_utils.py
@@ -422,6 +422,11 @@ def generate_and_run_tests(
capture_failed=capture_failed,
)
+ console.print(
+ "The columns for 'data_leak' and 'result' in the table represent independent aspects. It's possible for there to be a data leak in the endpoint, yet the result for that endpoint may still be marked as 'Success'. This is because the 'result' column doesn't necessarily reflect the overall test result; it may indicate success even in the presence of a data leak."
+ )
+
+ console.rule()
result_summary = ResultSummarizer.generate_count_summary(
results, table_title='Results Summary'
)
From 973482f50d9971e2cb571ffdf51599cf1d5a3991 Mon Sep 17 00:00:00 2001
From: Dhrumil Mistry <56185972+dmdhrumilmistry@users.noreply.github.com>
Date: Sat, 27 Apr 2024 19:55:11 +0530
Subject: [PATCH 7/7] bump deps and project version
---
src/poetry.lock | 186 ++++++++++++++++++++++-----------------------
src/pyproject.toml | 2 +-
2 files changed, 94 insertions(+), 94 deletions(-)
diff --git a/src/poetry.lock b/src/poetry.lock
index 4b4fe20..7d383f8 100644
--- a/src/poetry.lock
+++ b/src/poetry.lock
@@ -819,14 +819,14 @@ files = [
[[package]]
name = "pluggy"
-version = "1.4.0"
+version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"},
- {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"},
+ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
+ {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[package.extras]
@@ -835,19 +835,19 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pydantic"
-version = "2.7.0"
+version = "2.7.1"
description = "Data validation using Python type hints"
category = "main"
optional = true
python-versions = ">=3.8"
files = [
- {file = "pydantic-2.7.0-py3-none-any.whl", hash = "sha256:9dee74a271705f14f9a1567671d144a851c675b072736f0a7b2608fd9e495352"},
- {file = "pydantic-2.7.0.tar.gz", hash = "sha256:b5ecdd42262ca2462e2624793551e80911a1e989f462910bb81aef974b4bb383"},
+ {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"},
+ {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"},
]
[package.dependencies]
annotated-types = ">=0.4.0"
-pydantic-core = "2.18.1"
+pydantic-core = "2.18.2"
typing-extensions = ">=4.6.1"
[package.extras]
@@ -855,91 +855,91 @@ email = ["email-validator (>=2.0.0)"]
[[package]]
name = "pydantic-core"
-version = "2.18.1"
+version = "2.18.2"
description = "Core functionality for Pydantic validation and serialization"
category = "main"
optional = true
python-versions = ">=3.8"
files = [
- {file = "pydantic_core-2.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ee9cf33e7fe14243f5ca6977658eb7d1042caaa66847daacbd2117adb258b226"},
- {file = "pydantic_core-2.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6b7bbb97d82659ac8b37450c60ff2e9f97e4eb0f8a8a3645a5568b9334b08b50"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df4249b579e75094f7e9bb4bd28231acf55e308bf686b952f43100a5a0be394c"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0491006a6ad20507aec2be72e7831a42efc93193d2402018007ff827dc62926"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ae80f72bb7a3e397ab37b53a2b49c62cc5496412e71bc4f1277620a7ce3f52b"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58aca931bef83217fca7a390e0486ae327c4af9c3e941adb75f8772f8eeb03a1"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1be91ad664fc9245404a789d60cba1e91c26b1454ba136d2a1bf0c2ac0c0505a"},
- {file = "pydantic_core-2.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:667880321e916a8920ef49f5d50e7983792cf59f3b6079f3c9dac2b88a311d17"},
- {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f7054fdc556f5421f01e39cbb767d5ec5c1139ea98c3e5b350e02e62201740c7"},
- {file = "pydantic_core-2.18.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:030e4f9516f9947f38179249778709a460a3adb516bf39b5eb9066fcfe43d0e6"},
- {file = "pydantic_core-2.18.1-cp310-none-win32.whl", hash = "sha256:2e91711e36e229978d92642bfc3546333a9127ecebb3f2761372e096395fc649"},
- {file = "pydantic_core-2.18.1-cp310-none-win_amd64.whl", hash = "sha256:9a29726f91c6cb390b3c2338f0df5cd3e216ad7a938762d11c994bb37552edb0"},
- {file = "pydantic_core-2.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9ece8a49696669d483d206b4474c367852c44815fca23ac4e48b72b339807f80"},
- {file = "pydantic_core-2.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a5d83efc109ceddb99abd2c1316298ced2adb4570410defe766851a804fcd5b"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7973c381283783cd1043a8c8f61ea5ce7a3a58b0369f0ee0ee975eaf2f2a1b"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:54c7375c62190a7845091f521add19b0f026bcf6ae674bdb89f296972272e86d"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd63cec4e26e790b70544ae5cc48d11b515b09e05fdd5eff12e3195f54b8a586"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:561cf62c8a3498406495cfc49eee086ed2bb186d08bcc65812b75fda42c38294"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68717c38a68e37af87c4da20e08f3e27d7e4212e99e96c3d875fbf3f4812abfc"},
- {file = "pydantic_core-2.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d5728e93d28a3c63ee513d9ffbac9c5989de8c76e049dbcb5bfe4b923a9739d"},
- {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f0f17814c505f07806e22b28856c59ac80cee7dd0fbb152aed273e116378f519"},
- {file = "pydantic_core-2.18.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d816f44a51ba5175394bc6c7879ca0bd2be560b2c9e9f3411ef3a4cbe644c2e9"},
- {file = "pydantic_core-2.18.1-cp311-none-win32.whl", hash = "sha256:09f03dfc0ef8c22622eaa8608caa4a1e189cfb83ce847045eca34f690895eccb"},
- {file = "pydantic_core-2.18.1-cp311-none-win_amd64.whl", hash = "sha256:27f1009dc292f3b7ca77feb3571c537276b9aad5dd4efb471ac88a8bd09024e9"},
- {file = "pydantic_core-2.18.1-cp311-none-win_arm64.whl", hash = "sha256:48dd883db92e92519201f2b01cafa881e5f7125666141a49ffba8b9facc072b0"},
- {file = "pydantic_core-2.18.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b6b0e4912030c6f28bcb72b9ebe4989d6dc2eebcd2a9cdc35fefc38052dd4fe8"},
- {file = "pydantic_core-2.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3202a429fe825b699c57892d4371c74cc3456d8d71b7f35d6028c96dfecad31"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3982b0a32d0a88b3907e4b0dc36809fda477f0757c59a505d4e9b455f384b8b"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25595ac311f20e5324d1941909b0d12933f1fd2171075fcff763e90f43e92a0d"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14fe73881cf8e4cbdaded8ca0aa671635b597e42447fec7060d0868b52d074e6"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca976884ce34070799e4dfc6fbd68cb1d181db1eefe4a3a94798ddfb34b8867f"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684d840d2c9ec5de9cb397fcb3f36d5ebb6fa0d94734f9886032dd796c1ead06"},
- {file = "pydantic_core-2.18.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:54764c083bbe0264f0f746cefcded6cb08fbbaaf1ad1d78fb8a4c30cff999a90"},
- {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:201713f2f462e5c015b343e86e68bd8a530a4f76609b33d8f0ec65d2b921712a"},
- {file = "pydantic_core-2.18.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fd1a9edb9dd9d79fbeac1ea1f9a8dd527a6113b18d2e9bcc0d541d308dae639b"},
- {file = "pydantic_core-2.18.1-cp312-none-win32.whl", hash = "sha256:d5e6b7155b8197b329dc787356cfd2684c9d6a6b1a197f6bbf45f5555a98d411"},
- {file = "pydantic_core-2.18.1-cp312-none-win_amd64.whl", hash = "sha256:9376d83d686ec62e8b19c0ac3bf8d28d8a5981d0df290196fb6ef24d8a26f0d6"},
- {file = "pydantic_core-2.18.1-cp312-none-win_arm64.whl", hash = "sha256:c562b49c96906b4029b5685075fe1ebd3b5cc2601dfa0b9e16c2c09d6cbce048"},
- {file = "pydantic_core-2.18.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3e352f0191d99fe617371096845070dee295444979efb8f27ad941227de6ad09"},
- {file = "pydantic_core-2.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0295d52b012cbe0d3059b1dba99159c3be55e632aae1999ab74ae2bd86a33d7"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56823a92075780582d1ffd4489a2e61d56fd3ebb4b40b713d63f96dd92d28144"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3f79e17b56741b5177bcc36307750d50ea0698df6aa82f69c7db32d968c1c2"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38a5024de321d672a132b1834a66eeb7931959c59964b777e8f32dbe9523f6b1"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d2ce426ee691319d4767748c8e0895cfc56593d725594e415f274059bcf3cb76"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2adaeea59849ec0939af5c5d476935f2bab4b7f0335b0110f0f069a41024278e"},
- {file = "pydantic_core-2.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9b6431559676a1079eac0f52d6d0721fb8e3c5ba43c37bc537c8c83724031feb"},
- {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85233abb44bc18d16e72dc05bf13848a36f363f83757541f1a97db2f8d58cfd9"},
- {file = "pydantic_core-2.18.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:641a018af4fe48be57a2b3d7a1f0f5dbca07c1d00951d3d7463f0ac9dac66622"},
- {file = "pydantic_core-2.18.1-cp38-none-win32.whl", hash = "sha256:63d7523cd95d2fde0d28dc42968ac731b5bb1e516cc56b93a50ab293f4daeaad"},
- {file = "pydantic_core-2.18.1-cp38-none-win_amd64.whl", hash = "sha256:907a4d7720abfcb1c81619863efd47c8a85d26a257a2dbebdb87c3b847df0278"},
- {file = "pydantic_core-2.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aad17e462f42ddbef5984d70c40bfc4146c322a2da79715932cd8976317054de"},
- {file = "pydantic_core-2.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94b9769ba435b598b547c762184bcfc4783d0d4c7771b04a3b45775c3589ca44"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e0e57cc704a52fb1b48f16d5b2c8818da087dbee6f98d9bf19546930dc64b5"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76b86e24039c35280ceee6dce7e62945eb93a5175d43689ba98360ab31eebc4a"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a05db5013ec0ca4a32cc6433f53faa2a014ec364031408540ba858c2172bb0"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:250ae39445cb5475e483a36b1061af1bc233de3e9ad0f4f76a71b66231b07f88"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a32204489259786a923e02990249c65b0f17235073149d0033efcebe80095570"},
- {file = "pydantic_core-2.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6395a4435fa26519fd96fdccb77e9d00ddae9dd6c742309bd0b5610609ad7fb2"},
- {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2533ad2883f001efa72f3d0e733fb846710c3af6dcdd544fe5bf14fa5fe2d7db"},
- {file = "pydantic_core-2.18.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b560b72ed4816aee52783c66854d96157fd8175631f01ef58e894cc57c84f0f6"},
- {file = "pydantic_core-2.18.1-cp39-none-win32.whl", hash = "sha256:582cf2cead97c9e382a7f4d3b744cf0ef1a6e815e44d3aa81af3ad98762f5a9b"},
- {file = "pydantic_core-2.18.1-cp39-none-win_amd64.whl", hash = "sha256:ca71d501629d1fa50ea7fa3b08ba884fe10cefc559f5c6c8dfe9036c16e8ae89"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e178e5b66a06ec5bf51668ec0d4ac8cfb2bdcb553b2c207d58148340efd00143"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:72722ce529a76a4637a60be18bd789d8fb871e84472490ed7ddff62d5fed620d"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fe0c1ce5b129455e43f941f7a46f61f3d3861e571f2905d55cdbb8b5c6f5e2c"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4284c621f06a72ce2cb55f74ea3150113d926a6eb78ab38340c08f770eb9b4d"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a0c3e718f4e064efde68092d9d974e39572c14e56726ecfaeebbe6544521f47"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2027493cc44c23b598cfaf200936110433d9caa84e2c6cf487a83999638a96ac"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76909849d1a6bffa5a07742294f3fa1d357dc917cb1fe7b470afbc3a7579d539"},
- {file = "pydantic_core-2.18.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ee7ccc7fb7e921d767f853b47814c3048c7de536663e82fbc37f5eb0d532224b"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ee2794111c188548a4547eccc73a6a8527fe2af6cf25e1a4ebda2fd01cdd2e60"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a139fe9f298dc097349fb4f28c8b81cc7a202dbfba66af0e14be5cfca4ef7ce5"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d074b07a10c391fc5bbdcb37b2f16f20fcd9e51e10d01652ab298c0d07908ee2"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c69567ddbac186e8c0aadc1f324a60a564cfe25e43ef2ce81bcc4b8c3abffbae"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:baf1c7b78cddb5af00971ad5294a4583188bda1495b13760d9f03c9483bb6203"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2684a94fdfd1b146ff10689c6e4e815f6a01141781c493b97342cdc5b06f4d5d"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:73c1bc8a86a5c9e8721a088df234265317692d0b5cd9e86e975ce3bc3db62a59"},
- {file = "pydantic_core-2.18.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e60defc3c15defb70bb38dd605ff7e0fae5f6c9c7cbfe0ad7868582cb7e844a6"},
- {file = "pydantic_core-2.18.1.tar.gz", hash = "sha256:de9d3e8717560eb05e28739d1b35e4eac2e458553a52a301e51352a7ffc86a35"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"},
+ {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"},
+ {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"},
+ {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"},
+ {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"},
+ {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"},
+ {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"},
+ {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = "sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"},
+ {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"},
+ {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"},
+ {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = "sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"},
+ {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"},
+ {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"},
+ {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"},
+ {file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"},
+ {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"},
+ {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"},
+ {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = "sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"},
+ {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"},
+ {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"},
+ {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"},
]
[package.dependencies]
@@ -963,14 +963,14 @@ windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pytest"
-version = "8.1.1"
+version = "8.1.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
- {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"},
- {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"},
+ {file = "pytest-8.1.2-py3-none-any.whl", hash = "sha256:6c06dc309ff46a05721e6fd48e492a775ed8165d2ecdf57f156a80c7e95bb142"},
+ {file = "pytest-8.1.2.tar.gz", hash = "sha256:f3c45d1d5eed96b01a2aea70dee6a4a366d51d38f9957768083e4fecfc77f3ef"},
]
[package.dependencies]
@@ -1049,14 +1049,14 @@ files = [
[[package]]
name = "redis"
-version = "5.0.3"
+version = "5.0.4"
description = "Python client for Redis database and key-value store"
category = "main"
optional = true
python-versions = ">=3.7"
files = [
- {file = "redis-5.0.3-py3-none-any.whl", hash = "sha256:5da9b8fe9e1254293756c16c008e8620b3d15fcc6dde6babde9541850e72a32d"},
- {file = "redis-5.0.3.tar.gz", hash = "sha256:4973bae7444c0fbed64a06b87446f79361cb7e4ec1538c022d696ed7a5015580"},
+ {file = "redis-5.0.4-py3-none-any.whl", hash = "sha256:7adc2835c7a9b5033b7ad8f8918d09b7344188228809c98df07af226d39dec91"},
+ {file = "redis-5.0.4.tar.gz", hash = "sha256:ec31f2ed9675cc54c21ba854cfe0462e6faf1d83c8ce5944709db8a4700b9c61"},
]
[package.dependencies]
diff --git a/src/pyproject.toml b/src/pyproject.toml
index 14ff567..48e2c67 100644
--- a/src/pyproject.toml
+++ b/src/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "offat"
-version = "0.17.2"
+version = "0.17.3"
description = "Offensive API tester tool automates checks for common API vulnerabilities"
authors = ["Dhrumil Mistry "]
license = "MIT"