From 1a58d74c1ed149c5e5eb9ca885bc6a7f207b20c8 Mon Sep 17 00:00:00 2001 From: Juan Pardo Date: Thu, 12 Oct 2023 10:54:06 +0200 Subject: [PATCH] Added import job documentation (#464) * Added import job documentation --- CHANGES.rst | 4 + croud/clusters/commands.py | 3 +- croud/printer.py | 12 ++- docs/commands/clusters.rst | 140 ++++++++++++++++++++++++++++++++ tests/commands/test_clusters.py | 65 ++++++++++++++- 5 files changed, 217 insertions(+), 7 deletions(-) diff --git a/CHANGES.rst b/CHANGES.rst index 54b444a7..da65d544 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -9,6 +9,10 @@ Unreleased - Added support in import jobs for private S3 files using organization secrets. +- Added documentation for all import jobs commands. + +- Fixed ``import-job create from-url`` throwing an error despite working. + 1.7.0 - 2023/09/11 ================== diff --git a/croud/clusters/commands.py b/croud/clusters/commands.py index 6bce4ced..64ff26ed 100644 --- a/croud/clusters/commands.py +++ b/croud/clusters/commands.py @@ -305,10 +305,9 @@ def import_jobs_list(args: Namespace) -> None: print_response( data=data, errors=errors, - keys=["id", "cluster_id", "status", "type", "url", "destination"], + keys=["id", "cluster_id", "status", "type", "destination"], output_fmt=get_output_format(args), transforms={ - "url": lambda field: field.get("url"), "destination": lambda field: field.get("table"), }, ) diff --git a/croud/printer.py b/croud/printer.py index ca39f247..52bd3901 100644 --- a/croud/printer.py +++ b/croud/printer.py @@ -158,14 +158,22 @@ def format_rows(self, rows: Union[List[JsonDict], JsonDict]) -> str: # | bar | 2 | # +-----+-----+ - headers = list(map(str, rows[0].keys())) if len(rows) else self.keys + all_keys = list(map(str, rows[0].keys())) if len(rows) else self.keys + if all_keys: + for row in rows: + for key in list(map(str, row.keys())): + if key not in all_keys: + all_keys.append(key) + + headers = all_keys if len(rows) else self.keys + if headers is None: return 
"" values = [ [ self.transforms.get(header, TableFormatPrinter._identity_transform)( - row[header] + row.get(header, "") ) for header in headers ] diff --git a/docs/commands/clusters.rst b/docs/commands/clusters.rst index a290e266..daac790e 100644 --- a/docs/commands/clusters.rst +++ b/docs/commands/clusters.rst @@ -586,6 +586,146 @@ Example to organization and project admins. +``clusters import-jobs`` +======================== + +.. argparse:: + :module: croud.__main__ + :func: get_parser + :prog: croud + :path: clusters import-jobs + :nosubcommands: + + + +``clusters import-jobs create`` +=============================== + +.. argparse:: + :module: croud.__main__ + :func: get_parser + :prog: croud + :path: clusters import-jobs create + :nosubcommands: + + +``clusters import-jobs create from-url`` +======================================== + +.. argparse:: + :module: croud.__main__ + :func: get_parser + :prog: croud + :path: clusters import-jobs create from-url + +Example +------- + +.. code-block:: console + + sh$ croud clusters import-jobs create from-url --cluster-id e1e38d92-a650-48f1-8a70-8133f2d5c400 \ + --file-format csv --table my_table_name --url https://s3.amazonaws.com/my.import.data.gz --compression gzip + +--------------------------------------+--------------------------------------+------------+ + | id | cluster_id | status | + |--------------------------------------+--------------------------------------+------------| + | dca4986d-f7c8-4121-af81-863cca1dab0f | e1e38d92-a650-48f1-8a70-8133f2d5c400 | REGISTERED | + +--------------------------------------+--------------------------------------+------------+ + ==> Info: Status: REGISTERED (Your import job was received and is pending processing.) + ==> Info: Done importing 3 records and 36 Bytes. + ==> Success: Operation completed. + + +``clusters import-jobs create from-file`` +========================================= + +.. 
argparse:: + :module: croud.__main__ + :func: get_parser + :prog: croud + :path: clusters import-jobs create from-file + +.. code-block:: console + + sh$ croud clusters import-jobs create from-file --cluster-id e1e38d92-a650-48f1-8a70-8133f2d5c400 \ + --file-format csv --table my_table_name --file-id 2e71e5a6-a21a-4e99-ae58-705a1f15635c + +--------------------------------------+--------------------------------------+------------+ + | id | cluster_id | status | + |--------------------------------------+--------------------------------------+------------| + | 9164f886-ae37-4a1b-b3fe-53f9e1897e7d | e1e38d92-a650-48f1-8a70-8133f2d5c400 | REGISTERED | + +--------------------------------------+--------------------------------------+------------+ + ==> Info: Status: REGISTERED (Your import job was received and is pending processing.) + ==> Info: Done importing 3 records and 36 Bytes. + ==> Success: Operation completed. + + +``clusters import-jobs create from-s3`` +======================================= + +.. argparse:: + :module: croud.__main__ + :func: get_parser + :prog: croud + :path: clusters import-jobs create from-s3 + +.. code-block:: console + + sh$ croud clusters import-jobs create from-s3 --cluster-id e1e38d92-a650-48f1-8a70-8133f2d5c400 \ + --secret-id 71e7c5da-51fa-44f2-b178-d95052cbe620 --bucket cratedbtestbucket \ + --file-path myfiles/cratedbimporttest.csv --file-format csv --table my_table_name + +--------------------------------------+--------------------------------------+------------+ + | id | cluster_id | status | + |--------------------------------------+--------------------------------------+------------| + | f29fdc02-edd0-4ad9-8839-9616fccf752b | e1e38d92-a650-48f1-8a70-8133f2d5c400 | REGISTERED | + +--------------------------------------+--------------------------------------+------------+ + ==> Info: Status: REGISTERED (Your import job was received and is pending processing.) + ==> Info: Done importing 3 records and 36 Bytes. 
+ ==> Success: Operation completed. + + ``clusters import-jobs list`` ============================= .. argparse:: :module: croud.__main__ :func: get_parser :prog: croud :path: clusters import-jobs list Example ------- .. code-block:: console sh$ croud clusters import-jobs list --cluster-id e1e38d92-a650-48f1-8a70-8133f2d5c400 +--------------------------------------+--------------------------------------+-----------+--------+-------------------+ | id | cluster_id | status | type | destination | |--------------------------------------+--------------------------------------+-----------+--------+-------------------| | dca4986d-f7c8-4121-af81-863cca1dab0f | e1e38d92-a650-48f1-8a70-8133f2d5c400 | SUCCEEDED | url | my_table_name | | 00de6048-3af6-41da-bfaa-661199d1c106 | e1e38d92-a650-48f1-8a70-8133f2d5c400 | SUCCEEDED | s3 | my_table_name | | 035f5ec1-ba9e-4a5c-9ce1-44e9a9cab6c1 | e1e38d92-a650-48f1-8a70-8133f2d5c400 | SUCCEEDED | file | my_table_name | +--------------------------------------+--------------------------------------+-----------+--------+-------------------+ ``clusters import-jobs delete`` =============================== .. argparse:: :module: croud.__main__ :func: get_parser :prog: croud :path: clusters import-jobs delete Example ------- .. code-block:: console sh$ croud clusters import-jobs delete \ --cluster-id e1e38d92-a650-48f1-8a70-8133f2d5c400 \ --import-job-id 00de6048-3af6-41da-bfaa-661199d1c106 ==> Success: Success. 
+ + ``clusters export-jobs`` ======================== diff --git a/tests/commands/test_clusters.py b/tests/commands/test_clusters.py index 451f8493..15b7f682 100644 --- a/tests/commands/test_clusters.py +++ b/tests/commands/test_clusters.py @@ -1667,13 +1667,72 @@ def test_import_job_create_from_file(mock_request): "status": "FAILED", "type": "url", "url": {"url": "https://some"}, - } + }, + { + "cluster_id": "123", + "compression": "gzip", + "dc": { + "created": "2023-03-14T10:12:29.763000+00:00", + "modified": "2023-03-14T10:12:29.763000+00:00", + }, + "destination": {"create_table": True, "table": "croud-csv-import-two"}, + "format": "json", + "id": "a95e5a20-61f7-415f-b128-1e21ddf17513", + "progress": { + "bytes": 0, + "message": "Failed", + "records": 0, + }, + "status": "FAILED", + "type": "s3", + "s3": { + "endpoint": "https://some", + "file_path": "a-file-path", + "bucket": "bucket-name", + "secret_id": "a95e5a20-61f7-415f-b128-1e21ddf17513", + }, + }, + { + "cluster_id": "123", + "compression": "gzip", + "dc": { + "created": "2023-03-14T10:12:29.763000+00:00", + "modified": "2023-03-14T10:12:29.763000+00:00", + }, + "destination": {"create_table": True, "table": "croud-csv-import-two"}, + "format": "json", + "id": "a95e5a20-61f7-415f-b128-1e21ddf17513", + "progress": { + "bytes": 0, + "message": "Failed", + "records": 0, + }, + "status": "FAILED", + "type": "file", + "file": { + "upload_url": "https://server.test/folder/myfile.json", + "file_size": 36, + "id": "a95e5a20-61f7-415f-b128-1e21ddf17513", + "name": "my test file", + "status": "UPLOADED", + }, + }, ], None, ), ) -def test_import_job_list(mock_request): - call_command("croud", "clusters", "import-jobs", "list", "--cluster-id", "123") +@pytest.mark.parametrize("output_format", ["table", "wide"]) +def test_import_job_list(mock_request, output_format): + call_command( + "croud", + "clusters", + "import-jobs", + "list", + "--cluster-id", + "123", + "-o", + output_format, + ) assert_rest( mock_request, 
RequestMethod.GET,