Skip to content

Commit

Permalink
make fmt
Browse files Browse the repository at this point in the history
  • Loading branch information
tanmay-db committed Dec 6, 2024
1 parent 4a75e63 commit c391f79
Show file tree
Hide file tree
Showing 2 changed files with 52 additions and 54 deletions.
93 changes: 46 additions & 47 deletions tests/test_errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -83,53 +83,52 @@ def make_private_link_response() -> requests.Response:
for x in base_subclass_test_cases]


@pytest.mark.parametrize(
'response, expected_error, expected_message', subclass_test_cases +
[(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
(fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
(fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
(make_private_link_response(), errors.PrivateLinkValidationError,
('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
'endpoint. For more information, see '
'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
),
(fake_valid_response(
'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
'/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
'/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
'/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
'/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
(fake_response(
'GET', 400,
'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
), errors.BadRequest,
'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
),
(fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
'Worker environment not ready'),
(fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
'Please report this issue with the following debugging information to the SDK issue tracker at '
'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
'< 400 Bad Request\n'
'< this is not a real response```')),
(fake_response(
'GET', 404,
json.dumps({
'detail': 'Group with id 1234 is not found',
'status': '404',
'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
})), errors.NotFound, 'None Group with id 1234 is not found'),
(fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
),
(fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
)])
@pytest.mark.parametrize('response, expected_error, expected_message', subclass_test_cases + [
(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
(fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
(fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
(make_private_link_response(), errors.PrivateLinkValidationError,
('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
'endpoint. For more information, see '
'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
),
(fake_valid_response(
'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
'/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
'/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
'/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
(fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
'/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
(fake_response(
'GET', 400,
'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
), errors.BadRequest,
'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'),
(fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
'Worker environment not ready'),
(fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
'Please report this issue with the following debugging information to the SDK issue tracker at '
'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
'< 400 Bad Request\n'
'< this is not a real response```')),
(fake_response(
'GET', 404,
json.dumps({
'detail': 'Group with id 1234 is not found',
'status': '404',
'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
})), errors.NotFound, 'None Group with id 1234 is not found'),
(fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
),
(fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
)
])
def test_get_api_error(response, expected_error, expected_message):
parser = errors._Parser()
with pytest.raises(errors.DatabricksError) as e:
Expand Down
13 changes: 6 additions & 7 deletions tests/test_oauth.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,26 +10,25 @@ def test_token_cache_unique_filename_by_host():
common_args = dict(client_id="abc",
redirect_url="http://localhost:8020",
oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
assert TokenCache(host="http://localhost:",
**common_args).filename != TokenCache("https://bar.cloud.databricks.com",
**common_args).filename
assert TokenCache(host="http://localhost:", **common_args).filename != TokenCache(
"https://bar.cloud.databricks.com", **common_args).filename


def test_token_cache_unique_filename_by_client_id():
    """Two TokenCache instances that differ only in client_id must resolve to
    distinct cache filenames, so tokens for different OAuth clients never collide."""
    # Hold every other constructor argument constant so any filename difference
    # can only be attributed to client_id.
    common_args = dict(host="http://localhost:",
                       redirect_url="http://localhost:8020",
                       oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
    # NOTE(review): the two asserts below are the same check formatted two ways —
    # this page appears to be a rendered diff showing the pre- and post-`make fmt`
    # line wrapping of a single assert; confirm against the actual file, which
    # should contain only one of them.
    assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def",
                                                                             **common_args).filename
    assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def", **
                                                                             common_args).filename


def test_token_cache_unique_filename_by_scopes():
    """Two TokenCache instances that differ only in the requested scopes must
    resolve to distinct cache filenames, so tokens with different scope sets
    are cached separately."""
    # Hold every other constructor argument constant so any filename difference
    # can only be attributed to scopes.
    common_args = dict(host="http://localhost:",
                       client_id="abc",
                       redirect_url="http://localhost:8020",
                       oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
    # NOTE(review): duplicate assert — this looks like diff rendering of the
    # pre- and post-`make fmt` wrapping of one assert; the real file should
    # contain only one. Confirm against the repository.
    assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"],
                                                                            **common_args).filename
    assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"], **
                                                                            common_args).filename


def test_account_oidc_endpoints(requests_mock):
Expand Down

0 comments on commit c391f79

Please sign in to comment.