Import jobs now support S3 files using an org secret (#462)
juanpardo authored Sep 28, 2023
1 parent 0092c9e commit 00e847d
Showing 4 changed files with 100 additions and 0 deletions.
2 changes: 2 additions & 0 deletions CHANGES.rst
@@ -7,6 +7,8 @@ Unreleased

- Added support for listing, deleting and creating organization secrets.

- Added support in import jobs for private S3 files using organization secrets.

1.7.0 - 2023/09/11
==================

30 changes: 30 additions & 0 deletions croud/__main__.py
@@ -51,6 +51,7 @@
    export_jobs_delete,
    export_jobs_list,
    import_jobs_create_from_file,
    import_jobs_create_from_s3,
    import_jobs_create_from_url,
    import_jobs_delete,
    import_jobs_list,
@@ -779,6 +780,35 @@
                    ] + import_job_create_common_args,
                    "resolver": import_jobs_create_from_file,
                },
                "from-s3": {
                    "help": "Create a data import job on the specified "
                    "cluster from an Amazon S3 compatible "
                    "location.",
                    "extra_args": [
                        # Type S3 params
                        Argument(
                            "--bucket", type=str, required=True,
                            help="The name of the S3 bucket that contains "
                            "the file to be imported."
                        ),
                        Argument(
                            "--file-path", type=str, required=True,
                            help="The absolute path in the S3 bucket that "
                            "points to the file to be imported."
                        ),
                        Argument(
                            "--secret-id", type=str, required=True,
                            help="The secret that contains the access key "
                            "and secret key needed to access the file "
                            "to be imported."
                        ),
                        Argument(
                            "--endpoint", type=str, required=False,
                            help="An Amazon S3 compatible endpoint."
                        ),
                    ] + import_job_create_common_args,
                    "resolver": import_jobs_create_from_s3,
                },
            },
        },
    },
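Taken together with the shared flags from import_job_create_common_args, the new subcommand can be invoked along these lines. This is a minimal sketch with placeholder values, mirroring the test added further down; --endpoint is optional and only needed for non-default, S3-compatible stores:

    croud clusters import-jobs create from-s3 \
        --cluster-id <cluster-id> \
        --bucket my-bucket-name \
        --file-path my-folder/my-file.csv.gz \
        --secret-id <organization-secret-id> \
        --endpoint https://my-s3-compatible-endpoint \
        --file-format csv \
        --compression gzip \
        --table my-table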
13 changes: 13 additions & 0 deletions croud/clusters/commands.py
@@ -186,6 +186,19 @@ def import_jobs_create_from_url(args: Namespace) -> None:
    import_jobs_create(args, extra_payload=extra_body)


def import_jobs_create_from_s3(args: Namespace) -> None:
    extra_body = {
        "s3": {
            "bucket": args.bucket,
            "file_path": args.file_path,
            "endpoint": args.endpoint or "",
            "secret_id": args.secret_id,
        }
    }
    args.type = "s3"
    import_jobs_create(args, extra_payload=extra_body)


def _get_org_id_from_cluster_id(client, cluster_id: str) -> Optional[str]:
    data, errors = client.get(f"/api/v2/clusters/{cluster_id}/")
    if errors or not data:
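Note the args.endpoint or "" fallback above: when --endpoint is not passed, the s3 section is still sent, just with an empty endpoint string. A minimal sketch of the extra payload in that case (placeholder values, not taken from the commit):

    extra_body = {
        "s3": {
            "bucket": "my-bucket-name",
            "file_path": "my-folder/my-file.csv.gz",
            "endpoint": "",  # empty string when --endpoint is omitted
            "secret_id": "<organization secret id>",
        }
    }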
55 changes: 55 additions & 0 deletions tests/commands/test_clusters.py
@@ -1546,6 +1546,61 @@ def test_import_job_create_from_url(mock_request):
    )


@mock.patch.object(
    Client, "request", return_value=({"id": "1", "status": "SUCCEEDED"}, None)
)
def test_import_job_create_from_s3(mock_request):
    cluster_id = gen_uuid()
    bucket = "my-bucket-name"
    file_path = "my-folder/my-file.csv.gz"
    secret_id = gen_uuid()
    endpoint = "https://my-s3-compatible-endpoint"
    call_command(
        "croud",
        "clusters",
        "import-jobs",
        "create",
        "from-s3",
        "--cluster-id",
        cluster_id,
        "--bucket",
        bucket,
        "--file-path",
        file_path,
        "--secret-id",
        secret_id,
        "--endpoint",
        endpoint,
        "--compression",
        "gzip",
        "--file-format",
        "csv",
        "--table",
        "my-table",
        "--create-table",
        "false",
    )
    body = {
        "type": "s3",
        "s3": {
            "bucket": bucket,
            "file_path": file_path,
            "secret_id": secret_id,
            "endpoint": endpoint,
        },
        "format": "csv",
        "destination": {"table": "my-table", "create_table": False},
        "compression": "gzip",
    }
    assert_rest(
        mock_request,
        RequestMethod.POST,
        f"/api/v2/clusters/{cluster_id}/import-jobs/",
        body=body,
        any_times=True,
    )


@mock.patch.object(
    Client, "request", return_value=({"id": "1", "status": "SUCCEEDED"}, None)
)
