diff --git a/tests/test_errors.py b/tests/test_errors.py
index 881f016f..42c7e42f 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -83,53 +83,52 @@ def make_private_link_response() -> requests.Response:
                        for x in base_subclass_test_cases]
 
 
-@pytest.mark.parametrize(
-    'response, expected_error, expected_message', subclass_test_cases +
-    [(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
-     (fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
-     (fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
-     (make_private_link_response(), errors.PrivateLinkValidationError,
-      ('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
-       'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
-       'endpoint. For more information, see '
-       'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
-      ),
-     (fake_valid_response(
-         'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
-         '/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
-     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
-                          '/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
-     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
-                          '/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
-     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
-                          '/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
-     (fake_response(
-         'GET', 400,
-         'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
-     ), errors.BadRequest,
-      'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
-      ),
-     (fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
-      'Worker environment not ready'),
-     (fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
-      ('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
-       'Please report this issue with the following debugging information to the SDK issue tracker at '
-       'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
-       '< 400 Bad Request\n'
-       '< this is not a real response```')),
-     (fake_response(
-         'GET', 404,
-         json.dumps({
-             'detail': 'Group with id 1234 is not found',
-             'status': '404',
-             'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
-         })), errors.NotFound, 'None Group with id 1234 is not found'),
-     (fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
-      'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
-      ),
-     (fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
-      'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
-      )])
+@pytest.mark.parametrize('response, expected_error, expected_message', subclass_test_cases + [
+    (fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
+    (fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
+    (fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
+    (make_private_link_response(), errors.PrivateLinkValidationError,
+     ('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
+      'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
+      'endpoint. For more information, see '
+      'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
+     ),
+    (fake_valid_response(
+        'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
+        '/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
+    (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
+                         '/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
+    (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
+                         '/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
+    (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
+                         '/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
+    (fake_response(
+        'GET', 400,
+        'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
+    ), errors.BadRequest,
+     'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'),
+    (fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
+     'Worker environment not ready'),
+    (fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
+     ('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
+      'Please report this issue with the following debugging information to the SDK issue tracker at '
+      'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
+      '< 400 Bad Request\n'
+      '< this is not a real response```')),
+    (fake_response(
+        'GET', 404,
+        json.dumps({
+            'detail': 'Group with id 1234 is not found',
+            'status': '404',
+            'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
+        })), errors.NotFound, 'None Group with id 1234 is not found'),
+    (fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
+     'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
+     ),
+    (fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
+     'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
+     )
+])
 def test_get_api_error(response, expected_error, expected_message):
     parser = errors._Parser()
     with pytest.raises(errors.DatabricksError) as e:
diff --git a/tests/test_oauth.py b/tests/test_oauth.py
index a637a550..d70257c8 100644
--- a/tests/test_oauth.py
+++ b/tests/test_oauth.py
@@ -10,17 +10,16 @@ def test_token_cache_unique_filename_by_host():
     common_args = dict(client_id="abc",
                        redirect_url="http://localhost:8020",
                        oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
-    assert TokenCache(host="http://localhost:",
-                      **common_args).filename != TokenCache("https://bar.cloud.databricks.com",
-                                                            **common_args).filename
+    assert TokenCache(host="http://localhost:", **common_args).filename != TokenCache(
+        "https://bar.cloud.databricks.com", **common_args).filename
 
 
 def test_token_cache_unique_filename_by_client_id():
     common_args = dict(host="http://localhost:",
                        redirect_url="http://localhost:8020",
                        oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
-    assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def",
-                                                                             **common_args).filename
+    assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def", **
+                                                                             common_args).filename
 
 
 def test_token_cache_unique_filename_by_scopes():
@@ -28,8 +27,8 @@ def test_token_cache_unique_filename_by_scopes():
                        client_id="abc",
                        redirect_url="http://localhost:8020",
                        oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
-    assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"],
-                                                                            **common_args).filename
+    assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"], **
+                                                                            common_args).filename
 
 
 def test_account_oidc_endpoints(requests_mock):