diff --git a/.gitignore b/.gitignore index 1f280df..d24a112 100644 --- a/.gitignore +++ b/.gitignore @@ -199,8 +199,11 @@ specs.yaml specs.json swagger.yaml swagger.json +oas.yml *.json ## unknown data .DS_Store -oas.yml + +## local testing scripts +test.py \ No newline at end of file diff --git a/src/offat/__main__.py b/src/offat/__main__.py index e404941..555ce59 100644 --- a/src/offat/__main__.py +++ b/src/offat/__main__.py @@ -126,6 +126,14 @@ def start(): action='store_true', help='Captures failed requests due to any exceptions into output file', ) + parser.add_argument( + '--server', + dest='server_url', + type=str, + default=None, + required=False, + help='server/host base url to overwrite from OAS/Swagger file', + ) args = parser.parse_args() # convert req headers str to dict @@ -142,7 +150,7 @@ def start(): test_data_config = validate_config_file_data(test_data_config) # parse args and run tests - api_parser = create_parser(args.fpath) + api_parser = create_parser(args.fpath, server_url=args.server_url) generate_and_run_tests( api_parser=api_parser, regex_pattern=args.path_regex_pattern, diff --git a/src/offat/http.py b/src/offat/http.py index 39a6277..9e6620c 100644 --- a/src/offat/http.py +++ b/src/offat/http.py @@ -36,6 +36,7 @@ def validate_proxy(self, proxy_url: str | None): True if the proxy URL seems valid and a basic connection can be established, False otherwise. """ # Check for valid URL format + # TODO: implement url parse security: https://docs.python.org/3/library/urllib.parse.html#url-parsing-security parsed_url = urlparse(proxy_url) if all([parsed_url.scheme, parsed_url.netloc]): return True diff --git a/src/offat/parsers/__init__.py b/src/offat/parsers/__init__.py index c9c3974..39dc176 100644 --- a/src/offat/parsers/__init__.py +++ b/src/offat/parsers/__init__.py @@ -1,6 +1,5 @@ -from sys import exit -from requests import get as http_get from json import loads as json_load, JSONDecodeError +from requests import get as http_get from .openapi import OpenAPIv3Parser from .swagger import SwaggerParser from .parser import BaseParser @@ -9,7 +8,9 @@ def create_parser( - fpath_or_url: str, spec: dict | None = None + fpath_or_url: str, + spec: dict | None = None, + server_url: str | None = None, ) -> SwaggerParser | OpenAPIv3Parser | None: '''returns parser based on doc file''' if fpath_or_url and is_valid_url(fpath_or_url): @@ -29,12 +30,14 @@ def create_parser( exit(-1) try: - parser = BaseParser(file_or_url=fpath_or_url, spec=spec) + parser = BaseParser(file_or_url=fpath_or_url, spec=spec, server_url=server_url) except OSError: logger.error('File Not Found') exit(-1) if parser.is_v3: - return OpenAPIv3Parser(file_or_url=fpath_or_url, spec=spec) + return OpenAPIv3Parser( + file_or_url=fpath_or_url, spec=spec, server_url=server_url + ) - return SwaggerParser(fpath_or_url=fpath_or_url, spec=spec) + return SwaggerParser(fpath_or_url=fpath_or_url, spec=spec, server_url=server_url) diff --git a/src/offat/parsers/openapi.py b/src/offat/parsers/openapi.py index ea49b8f..16c3de4 100644 --- a/src/offat/parsers/openapi.py +++ b/src/offat/parsers/openapi.py @@ -2,6 +2,7 @@ module to parse OAS v3 documentation JSON/YAML files. 
''' from .parser import BaseParser +from ..utils import parse_server_url from ..logger import logger @@ -11,22 +12,34 @@ class InvalidOpenAPIv3File(Exception): class OpenAPIv3Parser(BaseParser): '''OpenAPI v3 Spec File Parser''' - # while adding new method to this class, make sure same method is present in SwaggerParser class + # while adding new method to this class, make sure same method is present in SwaggerParser class - def __init__(self, file_or_url: str, spec: dict | None = None) -> None: - super().__init__(file_or_url=file_or_url, spec=spec) # noqa + def __init__( + self, file_or_url: str, spec: dict | None = None, *args, **kwargs + ) -> None: + super().__init__(file_or_url=file_or_url, spec=spec, *args, **kwargs) # noqa if not self.is_v3: - raise InvalidOpenAPIv3File("Invalid OAS v3 file") + raise InvalidOpenAPIv3File('Invalid OAS v3 file') - self._populate_hosts() self.http_scheme = self._get_scheme() - self.api_base_path = self.specification.get('basePath', '') + + # save hosts in self.hosts + self._populate_hosts() + + # raise error if host data not found + if not (self.hosts and self.hosts[0]): + raise ValueError('Host is invalid or not found') + + # parse and set host data + host_dict = self.hosts[0] + self.http_scheme = host_dict['scheme'] + self.host = f'{host_dict["host"]}:{host_dict["port"]}' + self.api_base_path = host_dict['basepath'] self.base_url = f"{self.http_scheme}://{self.host}" self.request_response_params = self._get_request_response_params() - def _populate_hosts(self): servers = self.specification.get('servers', []) hosts = [] @@ -35,14 +48,25 @@ def _populate_hosts(self): raise InvalidOpenAPIv3File('Server URLs Not Found in spec file') for server in servers: - host = server.get('url', '').removeprefix( - 'https://').removeprefix('http://').removesuffix('/') - host = None if host == '' else host - hosts.append(host) + # host = ( + # server.get('url', '') + # .removeprefix('https://') + # .removeprefix('http://') + # .removesuffix('/') + # ) + # host = None if host == '' else host + scheme, host, port, basepath = parse_server_url(url=server.get('url')) + + hosts.append( + { + 'scheme': scheme, + 'host': host, + 'port': port, + 'basepath': basepath, + } + ) self.hosts = hosts - self.host = self.hosts[0] - def _get_scheme(self): servers = self.specification.get('servers', []) @@ -53,20 +77,20 @@ def _get_scheme(self): scheme = 'https' if 'https' in schemes else 'http' return scheme - - def _fetch_schema_from_spec(self, param_schema_ref:str) -> dict: + def _fetch_schema_from_spec(self, param_schema_ref: str) -> dict: schema_spec_path = param_schema_ref.split('/')[1:] - + if len(schema_spec_path) > 3: - logger.error('Schema spec $ref path should not be greater than 3 (excluding #)') + logger.error( + 'Schema spec $ref path should not be greater than 3 (excluding #)' + ) return {} - - schema_data:dict = self.specification + + schema_data: dict = self.specification for child_ele in schema_spec_path: - schema_data:dict = schema_data.get(child_ele, {}) + schema_data: dict = schema_data.get(child_ele, {}) return schema_data - def _get_param_definition_schema(self, param: dict): '''Returns Model defined schema for the passed param''' @@ -96,13 +120,20 @@ def _get_response_definition_schema(self, responses: dict): if content: status_code_content_type_responses = content.keys() for status_code_content_type in status_code_content_type_responses: - status_code_content = responses[status_code]['content'][status_code_content_type].keys() + status_code_content = 
responses[status_code]['content'][ + status_code_content_type + ].keys() if 'parameters' in status_code_content: - responses[status_code]['schema'] = responses[status_code]['content'][status_code_content_type]['parameters'] + responses[status_code]['schema'] = responses[status_code][ + 'content' + ][status_code_content_type]['parameters'] elif 'schema' in status_code_content: - responses[status_code]['schema'] = self._get_param_definition_schema( - responses[status_code]['content'][status_code_content_type]) - + responses[status_code][ + 'schema' + ] = self._get_param_definition_schema( + responses[status_code]['content'][status_code_content_type] + ) + else: # Fetch $ref schema directly ref = responses[status_code].get('$ref', None) @@ -111,7 +142,6 @@ def _get_response_definition_schema(self, responses: dict): return responses - def _get_request_response_params(self): '''Returns Schema of requests and response params @@ -133,43 +163,52 @@ def _get_request_response_params(self): if http_method not in ['get', 'put', 'post', 'delete', 'options']: continue - request_parameters = paths[path][http_method].get( - 'parameters', []) + request_parameters = paths[path][http_method].get('parameters', []) # create list of parameters: Fetch object schema from OAS file body_params = [] - body_parameter_keys = paths[path][http_method].get( - 'requestBody', {}).get('content', {}) + body_parameter_keys = ( + paths[path][http_method].get('requestBody', {}).get('content', {}) + ) for body_parameter_key in body_parameter_keys: - body_parameters_dict = paths[path][http_method]['requestBody']['content'][body_parameter_key] + body_parameters_dict = paths[path][http_method]['requestBody'][ + 'content' + ][body_parameter_key] required = paths[path][http_method]['requestBody'].get('required') - description = paths[path][http_method]['requestBody'].get('description') + description = paths[path][http_method]['requestBody'].get( + 'description' + ) body_param = self._get_param_definition_schema(body_parameters_dict) - body_params.append({ - 'in': 'body', - 'name': body_parameter_key, - 'description': description, - 'required': required, - 'schema': body_param, - }) + body_params.append( + { + 'in': 'body', + 'name': body_parameter_key, + 'description': description, + 'required': required, + 'schema': body_param, + } + ) response_params = [] response_params = self._get_response_definition_schema( - paths[path][http_method].get('responses', {})) + paths[path][http_method].get('responses', {}) + ) # add body param to request param request_parameters += body_params - requests.append({ - 'http_method': http_method, - 'path': path, - 'request_params': request_parameters, - 'response_params': response_params, - 'path_params': path_params, - 'body_params': body_params, - }) + requests.append( + { + 'http_method': http_method, + 'path': path, + 'request_params': request_parameters, + 'response_params': response_params, + 'path_params': path_params, + 'body_params': body_params, + } + ) return requests diff --git a/src/offat/parsers/parser.py b/src/offat/parsers/parser.py index 035b415..a438448 100644 --- a/src/offat/parsers/parser.py +++ b/src/offat/parsers/parser.py @@ -1,31 +1,47 @@ from openapi_spec_validator import validate from openapi_spec_validator.readers import read_from_filename from ..logger import logger +from ..utils import parse_server_url class InvalidSpecVersion(Exception): - '''Exception to be raised ''' + '''Exception to be raised''' + pass class BaseParser: - def __init__(self, file_or_url: str, spec: dict 
= None) -> None: + def __init__( + self, file_or_url: str, spec: dict | None = None, server_url: str | None = None + ) -> None: if spec: - self.specification:dict = spec - base_uri = "" + self.specification: dict = spec + base_uri = '' else: self.specification, base_uri = read_from_filename(file_or_url) + self.is_v3 = self._get_oas_version() == 3 + + # overwrite server if present according to OAS version + if self.is_v3 and server_url: + self.specification['servers'] = [{'url': server_url}] + elif server_url: + scheme, host, port, basepath = parse_server_url(url=server_url) + basepath = '/' if basepath == '' else basepath + self.specification['host'] = f'{host}:{port}' + self.specification['schemes'] = [scheme] + self.specification['basePath'] = basepath + try: validate(spec=self.specification, base_uri=base_uri) self.valid = True except Exception as e: - logger.warning("OAS/Swagger file is invalid!") - logger.error('Failed to validate spec %s due to err: %s', file_or_url, repr(e)) + logger.warning('OAS/Swagger file is invalid!') + logger.error( + 'Failed to validate spec %s due to err: %s', file_or_url, repr(e) + ) self.valid = False - self.is_v3 = self._get_oas_version() == 3 - self.hosts = [] def _get_oas_version(self): @@ -33,7 +49,7 @@ def _get_oas_version(self): return 3 elif self.specification.get('swagger'): return 2 - raise InvalidSpecVersion("only openapi and swagger specs are supported for now") + raise InvalidSpecVersion('only openapi and swagger specs are supported for now') def _get_endpoints(self): '''Returns list of endpoint paths along with HTTP methods allowed''' diff --git a/src/offat/parsers/swagger.py b/src/offat/parsers/swagger.py index 764387d..98ec5ad 100644 --- a/src/offat/parsers/swagger.py +++ b/src/offat/parsers/swagger.py @@ -11,12 +11,15 @@ class InvalidSwaggerFile(Exception): class SwaggerParser(BaseParser): '''Swagger Spec file Parser''' + # while adding new method to this class, make sure same method is present in OpenAPIv3Parser class - def __init__(self, fpath_or_url: str, spec: dict | None = None) -> None: - super().__init__(file_or_url=fpath_or_url, spec=spec) # noqa + def __init__( + self, fpath_or_url: str, spec: dict | None = None, *args, **kwargs + ) -> None: + super().__init__(file_or_url=fpath_or_url, spec=spec, *args, **kwargs) # noqa if self.is_v3: - raise InvalidSwaggerFile("Invalid OAS v3 file") + raise InvalidSwaggerFile('Invalid OAS v3 file') self._populate_hosts() self.http_scheme = self._get_scheme() @@ -47,8 +50,7 @@ def _get_param_definition_schema(self, param: dict): if param_schema_ref: model_slug = param_schema_ref.split('/')[-1] - param_schema = self.specification.get( - 'definitions', {}).get(model_slug) + param_schema = self.specification.get('definitions', {}).get(model_slug) return param_schema @@ -67,7 +69,8 @@ def _get_response_definition_schema(self, responses: dict): responses[status_code]['schema'] = responses[status_code]['parameters'] elif 'schema' in status_code_response: responses[status_code]['schema'] = self._get_param_definition_schema( - responses[status_code]) + responses[status_code] + ) else: continue @@ -95,21 +98,23 @@ def _get_request_response_params(self): continue # below var contains overall params - request_parameters = paths[path][http_method].get( - 'parameters', []) + request_parameters = paths[path][http_method].get('parameters', []) response_params = self._get_response_definition_schema( - paths[path][http_method].get('responses', {})) + paths[path][http_method].get('responses', {}) + ) # create list 
of parameters: Fetch object schema from OAS file for param in request_parameters: param['schema'] = self._get_param_definition_schema(param) - requests.append({ - 'http_method': http_method, - 'path': path, - 'request_params': request_parameters, - 'response_params': response_params, - 'path_params': path_params, - }) + requests.append( + { + 'http_method': http_method, + 'path': path, + 'request_params': request_parameters, + 'response_params': response_params, + 'path_params': path_params, + } + ) return requests diff --git a/src/offat/tester/test_generator.py b/src/offat/tester/test_generator.py index ccbed1f..5069d5d 100644 --- a/src/offat/tester/test_generator.py +++ b/src/offat/tester/test_generator.py @@ -2,8 +2,9 @@ from .fuzzer import fill_params from .post_test_processor import PostTestFiltersEnum from .fuzzer import generate_random_int -from ..parsers import SwaggerParser, OpenAPIv3Parser from ..config_data_handler import populate_user_data +from ..parsers import SwaggerParser, OpenAPIv3Parser +from ..utils import join_uri_path class TestGenerator: @@ -20,7 +21,7 @@ class TestGenerator: sqli_fuzz_params: Performs SQL injection (SQLi) parameter fuzzing based on the provided OpenAPIParser instance. """ - def __init__(self, headers: dict = None) -> None: + def __init__(self, headers: dict = None) -> None: """ Initializes an instance of the TestGenerator class. @@ -41,7 +42,7 @@ def check_unsupported_http_methods( openapi_parser: SwaggerParser | OpenAPIv3Parser, success_codes: list[int] = [200, 201, 301, 302], *args, - **kwargs + **kwargs, ): '''Checks whether endpoint supports undocumented/unsupported HTTP methods @@ -72,7 +73,7 @@ def check_unsupported_http_methods( 'methods': [], 'body_params': [], 'query_params': [], - 'path_params': [] + 'path_params': [], } endpoints_index[endpoint]['endpoints'].append(fuzzed_endpoint_data) @@ -80,47 +81,51 @@ def check_unsupported_http_methods( endpoints_index[endpoint]['methods'].append(method.lower()) endpoints_index[endpoint]['body_params'].extend( - fuzzed_endpoint_data['body_params']) + fuzzed_endpoint_data['body_params'] + ) endpoints_index[endpoint]['query_params'].extend( - fuzzed_endpoint_data['query_params']) + fuzzed_endpoint_data['query_params'] + ) endpoints_index[endpoint]['path_params'].extend( - fuzzed_endpoint_data['path_params']) + fuzzed_endpoint_data['path_params'] + ) for endpoint, endpoint_dict in endpoints_index.items(): methods_allowed = endpoint_dict.get('methods', []) body_params = endpoint_dict.get('body_params', []) path_params = endpoint_dict.get('path_params', []) query_params = endpoint_dict.get('query_params', []) - url = f'{openapi_parser.base_url}{endpoint}' + url = join_uri_path(openapi_parser.base_url, endpoint) http_methods: set = {'get', 'post', 'put', 'delete', 'options'} restricted_methods = http_methods - set(methods_allowed) for restricted_method in restricted_methods: - tasks.append({ - 'test_name': 'UnSupported HTTP Method Check', - 'url': url, - 'endpoint': endpoint, - 'method': restricted_method.upper(), - 'malicious_payload': [], - 'args': args, - 'kwargs': kwargs, - 'result_details': { - True: 'Endpoint does not perform any HTTP method which is not documented', # passed - False: 'Endpoint performs HTTP method which is not documented', # failed - }, - 'body_params': body_params, - 'query_params': query_params, - 'path_params': path_params, - 'success_codes': success_codes, - 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name - }) + tasks.append( + { + 'test_name': 'UnSupported HTTP 
Method Check', + 'url': url, + 'endpoint': endpoint, + 'method': restricted_method.upper(), + 'malicious_payload': [], + 'args': args, + 'kwargs': kwargs, + 'result_details': { + True: 'Endpoint does not perform any HTTP method which is not documented', # passed + False: 'Endpoint performs HTTP method which is not documented', # failed + }, + 'body_params': body_params, + 'query_params': query_params, + 'path_params': path_params, + 'success_codes': success_codes, + 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name, + } + ) return tasks def __get_request_params_list(self, request_params: list[dict]): - '''Get list of request parameters - ''' + '''Get list of request parameters''' payload_data = [] for request_param in request_params: param_pos = request_param.get('in') @@ -132,16 +137,20 @@ def __get_request_params_list(self, request_params: list[dict]): for prop in props.keys(): prop_type = props[prop].get('type') - payload_data.append({ - 'in': param_pos, - 'name': prop, - 'type': prop_type, - 'required': prop in required_params, - }) + payload_data.append( + { + 'in': param_pos, + 'name': prop, + 'type': prop_type, + 'required': prop in required_params, + } + ) return payload_data - def __fuzz_request_params(self, openapi_parser: SwaggerParser | OpenAPIv3Parser) -> list[dict]: + def __fuzz_request_params( + self, openapi_parser: SwaggerParser | OpenAPIv3Parser + ) -> list[dict]: """ Fuzzes Request params available in different positions and returns a list of tasks @@ -164,11 +173,14 @@ def __fuzz_request_params(self, openapi_parser: SwaggerParser | OpenAPIv3Parser) # get params based on their position in request request_body_params = list( - filter(lambda x: x.get('in') == 'body', request_params)) + filter(lambda x: x.get('in') == 'body', request_params) + ) request_query_params = list( - filter(lambda x: x.get('in') == 'query', request_params)) + filter(lambda x: x.get('in') == 'query', request_params) + ) path_params_in_body = list( - filter(lambda x: x.get('in') == 'path', request_params)) + filter(lambda x: x.get('in') == 'path', request_params) + ) # handle path params from path_params # and replace path params by value in @@ -183,17 +195,24 @@ def __fuzz_request_params(self, openapi_parser: SwaggerParser | OpenAPIv3Parser) path_param_value = path_param.get('value') endpoint_path = endpoint_path.replace( - '{' + str(path_param_name) + '}', str(path_param_value)) - - tasks.append({ - 'url': f'{base_url}{openapi_parser.api_base_path}{endpoint_path}', - 'endpoint': f'{openapi_parser.api_base_path}{endpoint_path}', - 'method': path_obj.get('http_method', '').upper(), - 'body_params': request_body_params, - 'query_params': request_query_params, - 'path_params': path_params, - # 'malicious_payload':path_params, - }) + '{' + str(path_param_name) + '}', str(path_param_value) + ) + + tasks.append( + { + 'url': join_uri_path( + base_url, openapi_parser.api_base_path, endpoint_path + ), + 'endpoint': join_uri_path( + openapi_parser.api_base_path, endpoint_path + ), + 'method': path_obj.get('http_method', '').upper(), + 'body_params': request_body_params, + 'query_params': request_query_params, + 'path_params': path_params, + # 'malicious_payload':path_params, + } + ) return tasks @@ -221,11 +240,11 @@ def __inject_payload_in_params(self, request_params: list[dict], payload: str): return request_params def sqli_fuzz_params_test( - self, - openapi_parser: SwaggerParser | OpenAPIv3Parser, - success_codes: list[int] = [500], - *args, - **kwargs + self, + openapi_parser: SwaggerParser 
| OpenAPIv3Parser, + success_codes: list[int] = [500], + *args, + **kwargs, ): '''Performs SQL injection (SQLi) parameter fuzzing based on the provided OpenAPIParser instance. @@ -262,12 +281,14 @@ def sqli_fuzz_params_test( # handle body request params body_request_params = request_obj.get('body_params', []) malicious_body_request_params = self.__inject_payload_in_params( - body_request_params, sqli_payload) + body_request_params, sqli_payload + ) # handle query request params query_request_params = request_obj.get('query_params', []) malicious_query_request_params = self.__inject_payload_in_params( - query_request_params, sqli_payload) + query_request_params, sqli_payload + ) # BUG: for few SQLi test, path params injected value is not matching with final URI path params in output request_obj['test_name'] = 'SQLi Test' @@ -284,17 +305,19 @@ def sqli_fuzz_params_test( False: 'One or more parameter is vulnerable to SQL Injection Attack', # failed } request_obj['success_codes'] = success_codes - request_obj['response_filter'] = PostTestFiltersEnum.STATUS_CODE_FILTER.name + request_obj[ + 'response_filter' + ] = PostTestFiltersEnum.STATUS_CODE_FILTER.name tasks.append(deepcopy(request_obj)) return tasks def sqli_in_uri_path_fuzz_test( - self, - openapi_parser: SwaggerParser | OpenAPIv3Parser, - success_codes: list[int] = [500], - *args, - **kwargs + self, + openapi_parser: SwaggerParser | OpenAPIv3Parser, + success_codes: list[int] = [500], + *args, + **kwargs, ): '''Generate Tests for SQLi in endpoint path @@ -315,7 +338,10 @@ def sqli_in_uri_path_fuzz_test( # filter path containing params in path endpoints_with_param_in_path = list( - filter(lambda path_obj: '/{' in path_obj.get('path'), request_response_params)) + filter( + lambda path_obj: '/{' in path_obj.get('path'), request_response_params + ) + ) basic_sqli_payloads = [ "' OR 1=1 ;--", @@ -334,7 +360,8 @@ def sqli_in_uri_path_fuzz_test( # get request body params request_body_params = list( - filter(lambda x: x.get('in') == 'body', request_params)) + filter(lambda x: x.get('in') == 'body', request_params) + ) # handle path params from path_params # and replace path params by value in @@ -343,7 +370,8 @@ def sqli_in_uri_path_fuzz_test( path_params = path_obj.get('path_params', []) path_params_in_body = list( - filter(lambda x: x.get('in') == 'path', request_params)) + filter(lambda x: x.get('in') == 'path', request_params) + ) path_params += path_params_in_body path_params = fill_params(path_params, openapi_parser.is_v3) @@ -351,38 +379,46 @@ def sqli_in_uri_path_fuzz_test( path_param_name = path_param.get('name') # path_param_value = path_param.get('value') endpoint_path = endpoint_path.replace( - '{' + str(path_param_name) + '}', str(sqli_payload)) + '{' + str(path_param_name) + '}', str(sqli_payload) + ) request_query_params = list( - filter(lambda x: x.get('in') == 'query', request_params)) - - tasks.append({ - 'test_name': 'SQLi Test in URI Path with Fuzzed Params', - 'url': f'{base_url}{openapi_parser.api_base_path}{endpoint_path}', - 'endpoint': f'{openapi_parser.api_base_path}{endpoint_path}', - 'method': path_obj.get('http_method').upper(), - 'body_params': request_body_params, - 'query_params': request_query_params, - 'path_params': path_params, - 'malicious_payload': sqli_payload, - 'args': args, - 'kwargs': kwargs, - 'result_details': { - True: 'Endpoint is not vulnerable to SQLi', # passed - False: 'Endpoint might be vulnerable to SQli', # failed - }, - 'success_codes': success_codes, - 'response_filter': 
PostTestFiltersEnum.STATUS_CODE_FILTER.name - }) + filter(lambda x: x.get('in') == 'query', request_params) + ) + + tasks.append( + { + 'test_name': 'SQLi Test in URI Path with Fuzzed Params', + 'url': join_uri_path( + base_url, openapi_parser.api_base_path, endpoint_path + ), + 'endpoint': join_uri_path( + openapi_parser.api_base_path, endpoint_path + ), + 'method': path_obj.get('http_method').upper(), + 'body_params': request_body_params, + 'query_params': request_query_params, + 'path_params': path_params, + 'malicious_payload': sqli_payload, + 'args': args, + 'kwargs': kwargs, + 'result_details': { + True: 'Endpoint is not vulnerable to SQLi', # passed + False: 'Endpoint might be vulnerable to SQli', # failed + }, + 'success_codes': success_codes, + 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name, + } + ) return tasks def bola_fuzz_path_test( - self, - openapi_parser: SwaggerParser | OpenAPIv3Parser, - success_codes: list[int] = [200, 201, 301], - *args, - **kwargs + self, + openapi_parser: SwaggerParser | OpenAPIv3Parser, + success_codes: list[int] = [200, 201, 301], + *args, + **kwargs, ): '''Generate Tests for BOLA in endpoint path @@ -403,7 +439,10 @@ def bola_fuzz_path_test( # filter path containing params in path endpoints_with_param_in_path = list( - filter(lambda path_obj: '/{' in path_obj.get('path'), request_response_params)) + filter( + lambda path_obj: '/{' in path_obj.get('path'), request_response_params + ) + ) tasks = [] for path_obj in endpoints_with_param_in_path: @@ -413,7 +452,8 @@ def bola_fuzz_path_test( # get request body params request_body_params = list( - filter(lambda x: x.get('in') == 'body', request_params)) + filter(lambda x: x.get('in') == 'body', request_params) + ) # handle path params from path_params # and replace path params by value in @@ -422,7 +462,8 @@ def bola_fuzz_path_test( path_params = path_obj.get('path_params', []) path_params_in_body = list( - filter(lambda x: x.get('in') == 'path', request_params)) + filter(lambda x: x.get('in') == 'path', request_params) + ) path_params += path_params_in_body path_params = fill_params(path_params, openapi_parser.is_v3) @@ -430,39 +471,47 @@ def bola_fuzz_path_test( path_param_name = path_param.get('name') path_param_value = path_param.get('value') endpoint_path = endpoint_path.replace( - '{' + str(path_param_name) + '}', str(path_param_value)) + '{' + str(path_param_name) + '}', str(path_param_value) + ) request_query_params = list( - filter(lambda x: x.get('in') == 'query', request_params)) - - tasks.append({ - 'test_name': 'BOLA Path Test with Fuzzed Params', - # f'{base_url}{endpoint_path}', - 'url': f'{base_url}{openapi_parser.api_base_path}{endpoint_path}', - 'endpoint': f'{openapi_parser.api_base_path}{endpoint_path}', - 'method': path_obj.get('http_method').upper(), - 'body_params': request_body_params, - 'query_params': request_query_params, - 'path_params': path_params, - 'malicious_payload': path_params, - 'args': args, - 'kwargs': kwargs, - 'result_details': { - True: 'Endpoint is not vulnerable to BOLA', # passed - False: 'Endpoint might be vulnerable to BOLA', # failed - }, - 'success_codes': success_codes, - 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name - }) + filter(lambda x: x.get('in') == 'query', request_params) + ) + + tasks.append( + { + 'test_name': 'BOLA Path Test with Fuzzed Params', + # f'{base_url}{endpoint_path}', + 'url': join_uri_path( + base_url, openapi_parser.api_base_path, endpoint_path + ), + 'endpoint': join_uri_path( + 
openapi_parser.api_base_path, endpoint_path + ), + 'method': path_obj.get('http_method').upper(), + 'body_params': request_body_params, + 'query_params': request_query_params, + 'path_params': path_params, + 'malicious_payload': path_params, + 'args': args, + 'kwargs': kwargs, + 'result_details': { + True: 'Endpoint is not vulnerable to BOLA', # passed + False: 'Endpoint might be vulnerable to BOLA', # failed + }, + 'success_codes': success_codes, + 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name, + } + ) return tasks def bola_fuzz_trailing_slash_path_test( - self, - openapi_parser: SwaggerParser | OpenAPIv3Parser, - success_codes: list[int] = [200, 201, 301], - *args, - **kwargs + self, + openapi_parser: SwaggerParser | OpenAPIv3Parser, + success_codes: list[int] = [200, 201, 301], + *args, + **kwargs, ): '''Generate Tests for BOLA in endpoint path @@ -489,11 +538,14 @@ def bola_fuzz_trailing_slash_path_test( # get params based on their position in request request_body_params = list( - filter(lambda x: x.get('in') == 'body', request_params)) + filter(lambda x: x.get('in') == 'body', request_params) + ) request_query_params = list( - filter(lambda x: x.get('in') == 'query', request_params)) + filter(lambda x: x.get('in') == 'query', request_params) + ) path_params_in_body = list( - filter(lambda x: x.get('in') == 'path', request_params)) + filter(lambda x: x.get('in') == 'path', request_params) + ) # handle path params from path_params # and replace path params by value in @@ -507,33 +559,38 @@ def bola_fuzz_trailing_slash_path_test( path_param_name = path_param.get('name') path_param_value = path_param.get('value') endpoint_path = endpoint_path.replace( - '{' + str(path_param_name) + '}', str(path_param_value)) + '{' + str(path_param_name) + '}', str(path_param_value) + ) # generate URL for BOLA attack - url = f'{base_url}{openapi_parser.api_base_path}{endpoint_path}' + url = join_uri_path(base_url, openapi_parser.api_base_path, endpoint_path) if url.endswith('/'): url = f'{url}{generate_random_int()}' else: url = f'{url}/{generate_random_int()}' - tasks.append({ - 'test_name': 'BOLA Path Trailing Slash Test', - 'url': url, - 'endpoint': f'{openapi_parser.api_base_path}{endpoint_path}', - 'method': path_obj.get('http_method').upper(), - 'body_params': request_body_params, - 'query_params': request_query_params, - 'path_params': path_params, - 'malicious_payload': [], - 'args': args, - 'kwargs': kwargs, - 'result_details': { - True: 'Endpoint might not vulnerable to BOLA', # passed - False: 'Endpoint might be vulnerable to BOLA', # failed - }, - 'success_codes': success_codes, - 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name - }) + tasks.append( + { + 'test_name': 'BOLA Path Trailing Slash Test', + 'url': url, + 'endpoint': join_uri_path( + openapi_parser.api_base_path, endpoint_path + ), + 'method': path_obj.get('http_method').upper(), + 'body_params': request_body_params, + 'query_params': request_query_params, + 'path_params': path_params, + 'malicious_payload': [], + 'args': args, + 'kwargs': kwargs, + 'result_details': { + True: 'Endpoint might not vulnerable to BOLA', # passed + False: 'Endpoint might be vulnerable to BOLA', # failed + }, + 'success_codes': success_codes, + 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name, + } + ) return tasks @@ -543,7 +600,7 @@ def _inject_response_params(self, response_params: dict, is_v3: bool = False): Args: body_params ([dict]) : dict of response from openapi documentation - 
{'200':{'properties':{'schema':{'test_param':{'type':'str'}}}}} + {'200':{'properties':{'schema':{'test_param':{'type':'str'}}}}} Returns: list[dict]: list of dict containing test case for endpoint @@ -567,11 +624,11 @@ def _inject_response_params(self, response_params: dict, is_v3: bool = False): return params def bopla_fuzz_test( - self, - openapi_parser: SwaggerParser | OpenAPIv3Parser, - success_codes: list[int] = [200, 201, 301], - *args, - **kwargs + self, + openapi_parser: SwaggerParser | OpenAPIv3Parser, + success_codes: list[int] = [200, 201, 301], + *args, + **kwargs, ): '''Generate Tests for BOPLA/Mass Assignment Vulnerability @@ -598,11 +655,14 @@ def bopla_fuzz_test( # get params based on their position in request request_body_params = list( - filter(lambda x: x.get('in') == 'body', request_params)) + filter(lambda x: x.get('in') == 'body', request_params) + ) request_query_params = list( - filter(lambda x: x.get('in') == 'query', request_params)) + filter(lambda x: x.get('in') == 'query', request_params) + ) path_params_in_body = list( - filter(lambda x: x.get('in') == 'path', request_params)) + filter(lambda x: x.get('in') == 'path', request_params) + ) # handle path params from path_params # and replace path params by value in @@ -616,7 +676,8 @@ def bopla_fuzz_test( path_param_name = path_param.get('name') path_param_value = path_param.get('value') endpoint_path = endpoint_path.replace( - '{' + str(path_param_name) + '}', str(path_param_value)) + '{' + str(path_param_name) + '}', str(path_param_value) + ) # assign values to response params below and add them to JSON request body response_body_params = self._inject_response_params( @@ -625,25 +686,31 @@ def bopla_fuzz_test( ) request_body_params += response_body_params - tasks.append({ - 'test_name': 'BOPLA Test', - # f'{base_url}{endpoint_path}', - 'url': f'{base_url}{openapi_parser.api_base_path}{endpoint_path}', - 'endpoint': f'{openapi_parser.api_base_path}{endpoint_path}', - 'method': path_obj.get('http_method', '').upper(), - 'body_params': request_body_params, - 'query_params': request_query_params, - 'path_params': path_params, - 'malicious_payload': response_body_params, - 'args': args, - 'kwargs': kwargs, - 'result_details': { - True: 'Endpoint might not vulnerable to BOPLA', # passed - False: 'Endpoint might be vulnerable to BOPLA', # failed - }, - 'success_codes': success_codes, - 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name - }) + tasks.append( + { + 'test_name': 'BOPLA Test', + # f'{base_url}{endpoint_path}', + 'url': join_uri_path( + base_url, openapi_parser.api_base_path, endpoint_path + ), + 'endpoint': join_uri_path( + openapi_parser.api_base_path, endpoint_path + ), + 'method': path_obj.get('http_method', '').upper(), + 'body_params': request_body_params, + 'query_params': request_query_params, + 'path_params': path_params, + 'malicious_payload': response_body_params, + 'args': args, + 'kwargs': kwargs, + 'result_details': { + True: 'Endpoint might not vulnerable to BOPLA', # passed + False: 'Endpoint might be vulnerable to BOPLA', # failed + }, + 'success_codes': success_codes, + 'response_filter': PostTestFiltersEnum.STATUS_CODE_FILTER.name, + } + ) return tasks @@ -654,13 +721,13 @@ def test_with_user_data( test_for_actor1: bool = True, test_for_actor2: bool = False, *args, - **kwargs + **kwargs, ): '''Generate Tests with user sepecified data using provided test generator method Args: user_data (dict): User specified YAML data as dict. 
- test_generator_method (class method): test generator class method to be used for generating API pentest tests. + test_generator_method (class method): test generator class method to be used for generating API pentest tests. test_for_actor1 (bool): Generate tests for actor1 user data test_for_actor2 (bool): Generate tests for actor2 user data *args: Variable-length positional arguments. @@ -688,13 +755,13 @@ def test_with_user_data( return new_tests def __generate_injection_fuzz_params_test( - self, - openapi_parser: SwaggerParser | OpenAPIv3Parser, - test_name: str, - result_details: dict, - payloads_data: list[dict], - *args, - **kwargs + self, + openapi_parser: SwaggerParser | OpenAPIv3Parser, + test_name: str, + result_details: dict, + payloads_data: list[dict], + *args, + **kwargs, ): '''Performs injection parameter fuzzing based on the provided OpenAPIParser instance and matches injected payload using regex in response. @@ -722,12 +789,14 @@ def __generate_injection_fuzz_params_test( # handle body request params body_request_params = request_obj.get('body_params', []) malicious_body_request_params = self.__inject_payload_in_params( - body_request_params, payload) + body_request_params, payload + ) # handle query request params query_request_params = request_obj.get('query_params', []) malicious_query_request_params = self.__inject_payload_in_params( - query_request_params, payload) + query_request_params, payload + ) request_obj['test_name'] = test_name @@ -739,15 +808,20 @@ def __generate_injection_fuzz_params_test( request_obj['malicious_payload'] = payload request_obj['result_details'] = result_details - request_obj['response_filter'] = PostTestFiltersEnum.BODY_REGEX_FILTER.name + request_obj[ + 'response_filter' + ] = PostTestFiltersEnum.BODY_REGEX_FILTER.name request_obj['response_match_regex'] = payload_dict.get( - 'response_match_regex') + 'response_match_regex' + ) tasks.append(deepcopy(request_obj)) return tasks - def os_command_injection_fuzz_params_test(self, openapi_parser: SwaggerParser | OpenAPIv3Parser): + def os_command_injection_fuzz_params_test( + self, openapi_parser: SwaggerParser | OpenAPIv3Parser + ): '''Performs OS Command injection parameter fuzzing based on the provided OpenAPIParser instance. Args: @@ -764,18 +838,9 @@ def os_command_injection_fuzz_params_test(self, openapi_parser: SwaggerParser | test_name = 'OS Command Injection Test' payloads_data = [ - { - "request_payload": "cat /etc/passwd", - "response_match_regex": r"root:.*" - }, - { - "request_payload": "cat /etc/shadow", - "response_match_regex": r"root:.*" - }, - { - "request_payload": "ls -la", - "response_match_regex": r"total\s\d+" - }, + {'request_payload': 'cat /etc/passwd', 'response_match_regex': r'root:.*'}, + {'request_payload': 'cat /etc/shadow', 'response_match_regex': r'root:.*'}, + {'request_payload': 'ls -la', 'response_match_regex': r'total\s\d+'}, ] result_details = { @@ -790,7 +855,9 @@ def os_command_injection_fuzz_params_test(self, openapi_parser: SwaggerParser | payloads_data=payloads_data, ) - def xss_html_injection_fuzz_params_test(self, openapi_parser: SwaggerParser | OpenAPIv3Parser): + def xss_html_injection_fuzz_params_test( + self, openapi_parser: SwaggerParser | OpenAPIv3Parser + ): '''Performs OS Command injection parameter fuzzing based on the provided OpenAPIParser instance. 
Args: @@ -808,16 +875,16 @@ def xss_html_injection_fuzz_params_test(self, openapi_parser: SwaggerParser | Op payloads_data = [ { - "request_payload": "", - "response_match_regex": r"]*>.*<\/script>", + 'request_payload': '', + 'response_match_regex': r']*>.*<\/script>', }, { - "request_payload": "", - "response_match_regex": r"]*>.*<\/script>", + 'request_payload': '', + 'response_match_regex': r']*>.*<\/script>', }, { - "request_payload": "", - "response_match_regex": r"]*>", + 'request_payload': "]*>', }, ] diff --git a/src/offat/tester/tester_utils.py b/src/offat/tester/tester_utils.py index c71a530..05e03f3 100644 --- a/src/offat/tester/tester_utils.py +++ b/src/offat/tester/tester_utils.py @@ -30,7 +30,7 @@ def is_host_up(openapi_parser: SwaggerParser | OpenAPIv3Parser) -> bool: port = 443 if openapi_parser.http_scheme == 'https' else 80 case 2: host = tokens[0] - port = tokens[1] + port = int(tokens[1]) case _: logger.warning('Invalid host: %s', openapi_parser.host) return False @@ -52,7 +52,7 @@ def is_host_up(openapi_parser: SwaggerParser | OpenAPIv3Parser) -> bool: return res.status in range(200, 499) except Exception as e: logger.error( - 'Unable to connect to host %s:%d due to error: %s', host, port, repr(e) + 'Unable to connect to host %s:%s due to error: %s', host, port, repr(e) ) return False diff --git a/src/offat/tests/utils/test_parse_server_url.py b/src/offat/tests/utils/test_parse_server_url.py new file mode 100644 index 0000000..fe9c81c --- /dev/null +++ b/src/offat/tests/utils/test_parse_server_url.py @@ -0,0 +1,35 @@ +import unittest +from pytest import raises +from ...utils import parse_server_url + + +class TestParseUrls(unittest.TestCase): + def test_valid_urls(self): + urls = [ + 'https://example.com', + 'https://owasp.org/OFFAT/', + 'http://localhost:8000/test', + 'http://127.0.0.1:8001/url/1', + ] + for url in urls: + scheme, host, port, basepath = parse_server_url(url=url) + self.assertIn( + scheme, ['http', 'https'], f'Failed to validate url scheme: {url}' + ) + self.assertIn( + host, + ['example.com', 'owasp.org', 'localhost', '127.0.0.1'], + 'Host does not match expected test cases', + ) + self.assertIn( + port, + [80, 443, 8000, 8001], + 'Port does not match according to test case', + ) + self.assertIn(basepath, ['', '/OFFAT/', '/test', '/url/1']) + + def test_invalid_urls(self): + urls = ['owasp', 'ftp://example/', '\0\0alkdsjlatest', '" OR 1==1 -- -'] + for url in urls: + with raises(ValueError): + parse_server_url(url=url) diff --git a/src/offat/tests/utils/test_url_validations.py b/src/offat/tests/utils/test_url_validations.py index 2a5fd8b..fb7209d 100644 --- a/src/offat/tests/utils/test_url_validations.py +++ b/src/offat/tests/utils/test_url_validations.py @@ -7,18 +7,13 @@ def test_valid_urls(self): urls = [ 'https://example.com', 'https://owasp.org/OFFAT/', - # 'http://localhost:8000/test', + 'http://localhost:8000/test', 'http://127.0.0.1:8001/url', ] for url in urls: self.assertTrue(is_valid_url(url=url), f'Failed to validate url: {url}') def test_invalid_urls(self): - urls = [ - 'owasp', - 'ftp://example/', - '\0\0alkdsjlatest', - '" OR 1==1 -- -' - ] + urls = ['owasp', 'ftp://example/', '\0\0alkdsjlatest', '" OR 1==1 -- -'] for url in urls: - assert is_valid_url(url=url) == False + assert is_valid_url(url=url) is False diff --git a/src/offat/utils.py b/src/offat/utils.py index e9fdac1..3b44d83 100644 --- a/src/offat/utils.py +++ b/src/offat/utils.py @@ -2,10 +2,12 @@ utils module """ from json import loads as json_load, dumps as json_dumps, 
JSONDecodeError -from os.path import isfile from re import compile as re_compile, match -from pkg_resources import get_distribution +from urllib.parse import urlparse, urljoin +from os.path import isfile +from importlib.metadata import version from yaml import safe_load, YAMLError + from .logger import logger @@ -18,7 +20,7 @@ def get_package_version(): Returns: String: current package version ''' - return get_distribution('offat').version + return version('offat') def read_yaml(file_path: str) -> dict: @@ -190,5 +192,79 @@ def is_valid_url(url: str) -> bool: Raises: Any exception occurred during operation ''' - url_regex = re_compile(r'https?:\/\/[a-z.-]+(:\d+)?.*') + url_regex = re_compile( + r'https?:\/\/([a-z.-]|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})+(:\d+)?.*' + ) return bool(match(url_regex, url)) + + +def parse_server_url(url: str) -> tuple: + '''Parses url and returns scheme, host, port and basepath. + + Args: + url (str): url to be parsed + + Returns: + tuple: (scheme:str, host:str, port:int|None, basepath:str|None) + + Raises: + Any exception occurred during operation + ''' + # TODO: implement url parse security https://docs.python.org/3/library/urllib.parse.html#url-parsing-security + parsed_url = urlparse(url) + + netloc = parsed_url.netloc + port = 443 if parsed_url.scheme == 'https' else 80 + if ':' in netloc: + tokens = netloc.split(':') + host = tokens[0] + try: + port = int(tokens[1]) + except ValueError: + logger.error( + 'Invalid Port Number: failed to parse port in url. Using port %d according to scheme %s', + port, + parsed_url.scheme, + ) + else: + host = netloc + + if parsed_url.scheme not in ['http', 'https']: + raise ValueError('only http and https schemes are allowed') + + return parsed_url.scheme, host, port, parsed_url.path + + +def join_uri_path(*args: str, remove_prefix: str = '/') -> str: + '''constructs url from passed args using urljoin + + Args: + *args (str): parts of uri + remove_prefix (str): prefix to be removed from uri path before + joining with previous uri path. + + Returns: + str: constructed uri + + Raises: + Any exception occurred during operation + + Example: + ```python + from offat.utils import join_uri_path + + url = join_uri_path('https://example.com:443','/v2/', '/pet/findByStatus/') + print(url) + # output: https://example.com:443/pet/findByStatus/ + ``` + ''' + url = args[0] + if not url.endswith('/'): + url += '/' + + for uri in args[1:]: + if not url.endswith('/'): + url += '/' + url = urljoin(url, uri.removeprefix(remove_prefix)) + + return url diff --git a/src/poetry.lock b/src/poetry.lock index c9a44b4..39c8964 100644 --- a/src/poetry.lock +++ b/src/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. 
[[package]] name = "aiohttp" version = "3.9.3" description = "Async http client/server framework (asyncio)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -99,6 +100,7 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiolimiter" version = "1.1.0" description = "asyncio rate limiter, a leaky bucket implementation" +category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -110,6 +112,7 @@ files = [ name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -124,6 +127,7 @@ frozenlist = ">=1.1.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -135,6 +139,7 @@ files = [ name = "anyio" version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -155,6 +160,7 @@ trio = ["trio (>=0.23)"] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -166,6 +172,7 @@ files = [ name = "attrs" version = "23.2.0" description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -185,6 +192,7 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -196,6 +204,7 @@ files = [ name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -295,6 +304,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -309,7 +319,8 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
-optional = true +category = "main" +optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, @@ -320,6 +331,7 @@ files = [ name = "fastapi" version = "0.109.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -339,6 +351,7 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" name = "frozenlist" version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -425,6 +438,7 @@ files = [ name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -436,6 +450,7 @@ files = [ name = "httptools" version = "0.6.1" description = "A collection of framework independent HTTP protocol utils." +category = "main" optional = true python-versions = ">=3.8.0" files = [ @@ -484,6 +499,7 @@ test = ["Cython (>=0.29.24,<0.30.0)"] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -491,10 +507,23 @@ files = [ {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + [[package]] name = "jsonschema" version = "4.21.1" description = "An implementation of JSON Schema validation for Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -516,6 +545,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- name = "jsonschema-path" version = "0.3.2" description = "JSONSchema Spec with object-oriented paths" +category = "main" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -533,6 +563,7 @@ requests = ">=2.31.0,<3.0.0" name = "jsonschema-specifications" version = "2023.12.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -547,6 +578,7 @@ referencing = ">=0.31.0" name = "lazy-object-proxy" version = "1.10.0" description = "A fast and thorough lazy object proxy." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -593,6 +625,7 @@ files = [ name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -617,6 +650,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -628,6 +662,7 @@ files = [ name = "multidict" version = "6.0.5" description = "multidict implementation" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -727,6 +762,7 @@ files = [ name = "openapi-schema-validator" version = "0.6.2" description = "OpenAPI schema validation for Python" +category = "main" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -743,6 +779,7 @@ rfc3339-validator = "*" name = "openapi-spec-validator" version = "0.7.1" description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" +category = "main" optional = false python-versions = ">=3.8.0,<4.0.0" files = [ @@ -756,10 +793,23 @@ jsonschema-path = ">=0.3.1,<0.4.0" lazy-object-proxy = ">=1.7.1,<2.0.0" openapi-schema-validator = ">=0.6.0,<0.7.0" +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + [[package]] name = "pathable" version = "0.4.3" description = "Object-oriented paths" +category = "main" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ @@ -767,10 +817,27 @@ files = [ {file = "pathable-0.4.3.tar.gz", hash = "sha256:5c869d315be50776cc8a993f3af43e0c60dc01506b399643f919034ebf4cdcab"}, ] +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "pydantic" version = "2.6.4" description = "Data validation using Python type hints" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -790,6 +857,7 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.16.3" description = "" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -881,6 +949,7 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -892,10 +961,32 @@ files = [ plugins = ["importlib-metadata"] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pytest" +version = "8.1.1" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.1-py3-none-any.whl", hash = "sha256:2a8386cfc11fa9d2c50ee7b2a57e7d898ef90470a7a34c4b949ff59662bb78b7"}, + {file = "pytest-8.1.1.tar.gz", hash = "sha256:ac978141a75948948817d360297b7aae0fcb9d6ff6bc9ec6d514b85d5a65c044"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "python-dotenv" version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -910,6 +1001,7 @@ cli = ["click (>=5.0)"] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -959,6 +1051,7 @@ files = [ name = "redis" version = "5.0.3" description = "Python client for Redis database and key-value store" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -977,6 +1070,7 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)" name = "referencing" version = "0.31.1" description = "JSON Referencing + Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -992,6 +1086,7 @@ rpds-py = ">=0.7.0" name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1013,6 +1108,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "rfc3339-validator" version = "0.1.4" description = "A pure python RFC3339 validator" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1027,6 +1123,7 @@ six = "*" name = "rich" version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -1045,6 +1142,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "rpds-py" version = "0.18.0" description = "Python bindings to Rust's persistent data structures (rpds)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1153,6 +1251,7 @@ files = [ name = "rq" version = "1.16.1" description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." 
+category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1168,6 +1267,7 @@ redis = ">=3.5" name = "setuptools" version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1184,6 +1284,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1195,6 +1296,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1206,6 +1308,7 @@ files = [ name = "starlette" version = "0.36.3" description = "The little ASGI library that shines." +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1223,6 +1326,7 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1235,19 +1339,21 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "typing-extensions" -version = "4.10.0" +version = "4.11.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = true python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, - {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] [[package]] name = "urllib3" version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1265,6 +1371,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "uvicorn" version = "0.23.2" description = "The lightning-fast ASGI server." 
+category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1279,7 +1386,7 @@ h11 = ">=0.8" httptools = {version = ">=0.5.0", optional = true, markers = "extra == \"standard\""} python-dotenv = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} pyyaml = {version = ">=5.1", optional = true, markers = "extra == \"standard\""} -uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\" and extra == \"standard\""} +uvloop = {version = ">=0.14.0,<0.15.0 || >0.15.0,<0.15.1 || >0.15.1", optional = true, markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\" and extra == \"standard\""} watchfiles = {version = ">=0.13", optional = true, markers = "extra == \"standard\""} websockets = {version = ">=10.4", optional = true, markers = "extra == \"standard\""} @@ -1290,6 +1397,7 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "uvloop" version = "0.19.0" description = "Fast implementation of asyncio event loop on top of libuv" +category = "main" optional = true python-versions = ">=3.8.0" files = [ @@ -1334,6 +1442,7 @@ test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)" name = "watchfiles" version = "0.21.0" description = "Simple, modern and high performance file watching and code reload in python." +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1421,6 +1530,7 @@ anyio = ">=3.0.0" name = "websockets" version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1502,6 +1612,7 @@ files = [ name = "yarl" version = "1.9.4" description = "Yet another URL library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1607,4 +1718,4 @@ api = ["fastapi", "python-dotenv", "redis", "rq", "uvicorn"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "763a9953a1977328ea900f780b0d15a17b1844908c7168c6ab4c381797d82c97" +content-hash = "6ce8cb2d3fba2db2bedf4ab3df798b84fafb8a7e3863d31e32bae08afcb8fe49" diff --git a/src/pyproject.toml b/src/pyproject.toml index 46e802b..2bb187e 100644 --- a/src/pyproject.toml +++ b/src/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "offat" -version = "0.16.0" +version = "0.17.0" description = "Offensive API tester tool automates checks for common API vulnerabilities" authors = ["Dhrumil Mistry "] license = "MIT" @@ -35,6 +35,14 @@ offat = "offat.__main__:start" offat-api = "offat.api.__main__:start" +[tool.poetry.group.dev.dependencies] +pytest = "^8.1.1" + +[tool.pytest.ini_options] +testpaths = [ + "offat/tests", +] + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api"