From ef04906beaba71ca8d6e633b01601456f0582f47 Mon Sep 17 00:00:00 2001
From: Oleg Kulachenko
Date: Sun, 1 Oct 2023 23:49:00 +0400
Subject: [PATCH] Format code using `black`

The system tests code has a problem: different formatting is applied in
different files, classes, and even individual functions. I'm tired of
adjusting to each class, ignoring IDE warnings, manually reverting the
autocorrections of the IDE's built-in formatter, and so on.

I suggest formatting the code once using the black formatter
(https://pypi.org/project/black/) and, from then on, accepting only
formatted code. Formatting can then be verified in review or CI with
black's check mode, e.g. `python -m black --check neofs-testcases -t py310`.

I used this command to format the code:

python -m black neofs-testcases -t py310

Signed-off-by: Oleg Kulachenko
---
 pytest_tests/helpers/aws_cli_client.py | 2 +-
 pytest_tests/helpers/file_helper.py | 4 +-
 pytest_tests/helpers/grpc_responses.py | 5 +-
 pytest_tests/steps/session_token.py | 4 +-
 .../acl/storage_group/test_storagegroup.py | 4 +-
 pytest_tests/testsuites/acl/test_acl.py | 24 +++-
 pytest_tests/testsuites/acl/test_eacl.py | 13 +-
 pytest_tests/testsuites/conftest.py | 32 +++--
 .../testsuites/container/test_container.py | 4 +-
 .../failovers/test_failover_network.py | 75 ++++++----
 .../failovers/test_failover_part.py | 13 +-
 .../testsuites/network/test_config_changes.py | 17 ++-
 .../testsuites/object/test_object_api.py | 51 +++----
 .../testsuites/object/test_object_lock.py | 33 +++--
 .../services/http_gate/test_http_bearer.py | 12 +-
 .../services/http_gate/test_http_gate.py | 79 ++++++-----
 .../test_static_session_token_container.py | 73 +++++-----
 .../testsuites/shard/test_control_shard.py | 4 +-
 robot/resources/lib/python_keywords/acl.py | 4 +-
 robot/resources/lib/python_keywords/epoch.py | 16 ++-
 .../lib/python_keywords/failover_utils.py | 40 ++++--
 .../lib/python_keywords/http_gate.py | 20 ++-
 .../lib/python_keywords/neofs_verbs.py | 4 +-
 .../lib/python_keywords/object_access.py | 4 +-
 robot/variables/common.py | 15 +-
 tools/src/openssl_config_fix.py | 46 +++----
 tools/src/process-allure-reports.py | 129 ++++++++++++------
 tools/src/zip_dev_env_logs.py | 16 +--
 tools/tests/test_modify_openssl_config.py | 12 +-
 29 files changed, 458 insertions(+), 297 deletions(-)

diff --git a/pytest_tests/helpers/aws_cli_client.py b/pytest_tests/helpers/aws_cli_client.py
index ab3d4f15b..0f70d6f31 100644
--- a/pytest_tests/helpers/aws_cli_client.py
+++ b/pytest_tests/helpers/aws_cli_client.py
@@ -21,7 +21,7 @@ class AwsCliClient:
 
     def __init__(self, s3gate_endpoint) -> None:
         self.s3gate_endpoint = s3gate_endpoint
-        os.environ['AWS_EC2_METADATA_DISABLED'] = 'true'
+        os.environ["AWS_EC2_METADATA_DISABLED"] = "true"
 
     def create_bucket(
         self,
diff --git a/pytest_tests/helpers/file_helper.py b/pytest_tests/helpers/file_helper.py
index 1178c6308..c30ec0cec 100644
--- a/pytest_tests/helpers/file_helper.py
+++ b/pytest_tests/helpers/file_helper.py
@@ -99,7 +99,9 @@ def concat_files(file_paths: list, resulting_file_path: Optional[str] = None) ->
         Path to the resulting file.
""" if not resulting_file_path: - resulting_file_path = os.path.join(os.getcwd(), ASSETS_DIR, TEST_FILES_DIR, str(uuid.uuid4())) + resulting_file_path = os.path.join( + os.getcwd(), ASSETS_DIR, TEST_FILES_DIR, str(uuid.uuid4()) + ) with open(resulting_file_path, "wb") as f: for file in file_paths: with open(file, "rb") as part_file: diff --git a/pytest_tests/helpers/grpc_responses.py b/pytest_tests/helpers/grpc_responses.py index dce630819..8c473a191 100644 --- a/pytest_tests/helpers/grpc_responses.py +++ b/pytest_tests/helpers/grpc_responses.py @@ -34,7 +34,10 @@ EACL_TIMED_OUT = "eACL setting: await timeout expired" EACL_TABLE_IS_NOT_SET = "extended ACL table is not set for this container" EACL_NOT_FOUND = "code = 3073.*message = eACL not found" -EACL_PROHIBITED_TO_MODIFY_SYSTEM_ACCESS = "table validation: it is prohibited to modify system access" +EACL_PROHIBITED_TO_MODIFY_SYSTEM_ACCESS = ( + "table validation: it is prohibited to modify system access" +) + def error_matches_status(error: Exception, status_pattern: str) -> bool: """ diff --git a/pytest_tests/steps/session_token.py b/pytest_tests/steps/session_token.py index a0ecdcef6..d1d2e9f46 100644 --- a/pytest_tests/steps/session_token.py +++ b/pytest_tests/steps/session_token.py @@ -237,7 +237,7 @@ def create_session_token( wallet_password: str, rpc_endpoint: str, lifetime: Optional[int] = None, - expire_at: Optional[int] = None + expire_at: Optional[int] = None, ) -> str: """ Create session token for an object. @@ -259,7 +259,7 @@ def create_session_token( wallet_password=wallet_password, out=session_token, lifetime=lifetime, - expire_at=expire_at + expire_at=expire_at, ) return session_token diff --git a/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py b/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py index d0358d26e..704f96aea 100644 --- a/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py +++ b/pytest_tests/testsuites/acl/storage_group/test_storagegroup.py @@ -47,7 +47,9 @@ class TestStorageGroup(ClusterTestBase): @pytest.fixture(autouse=True) def prepare_two_wallets(self, default_wallet): self.main_wallet = default_wallet - self.other_wallet = os.path.join(os.getcwd(), ASSETS_DIR, TEST_FILES_DIR, f"{str(uuid.uuid4())}.json") + self.other_wallet = os.path.join( + os.getcwd(), ASSETS_DIR, TEST_FILES_DIR, f"{str(uuid.uuid4())}.json" + ) init_wallet(self.other_wallet, WALLET_PASS) if not FREE_STORAGE: main_chain = self.cluster.main_chain_nodes[0] diff --git a/pytest_tests/testsuites/acl/test_acl.py b/pytest_tests/testsuites/acl/test_acl.py index 9a1b4d92f..809c437a1 100644 --- a/pytest_tests/testsuites/acl/test_acl.py +++ b/pytest_tests/testsuites/acl/test_acl.py @@ -3,8 +3,15 @@ from testsuites.acl.conftest import Wallets from cluster_test_base import ClusterTestBase -from object_access import can_put_object, can_get_object, can_get_head_object, can_search_object, \ - can_get_range_of_object, can_get_range_hash_of_object, can_delete_object +from object_access import ( + can_put_object, + can_get_object, + can_get_head_object, + can_search_object, + can_get_range_of_object, + can_get_range_hash_of_object, + can_delete_object, +) from python_keywords.acl import EACLRole from python_keywords.container import create_container from python_keywords.container_access import ( @@ -184,7 +191,9 @@ def test_basic_acl_readonly(self, wallets, client_shell, read_only_container, fi ) @allure.title("Test basic ACL IR and STORAGE rules compliance") - def test_basic_acl_ir_storage_rules_compliance(self, 
+    def test_basic_acl_ir_storage_rules_compliance(
+        self, wallets: Wallets, public_container: str, file_path: str
+    ):
         """
         Test basic ACL IR and STORAGE rules compliance.
 
@@ -311,7 +320,9 @@ def test_basic_acl_ir_storage_rules_compliance(self, wallets: Wallets, public_co
                 endpoint=endpoint,
                 wallet_config=ir_wallet.config_path,
             )
-        with allure.step("STORAGE node should NOT be able to GET range of object from container"):
+        with allure.step(
+            "STORAGE node should NOT be able to GET range of object from container"
+        ):
             assert not can_get_range_of_object(
                 wallet=storage_wallet.wallet_path,
                 cid=cid,
@@ -330,7 +341,9 @@ def test_basic_acl_ir_storage_rules_compliance(self, wallets: Wallets, public_co
                 endpoint=endpoint,
                 wallet_config=ir_wallet.config_path,
             )
-        with allure.step("STORAGE node should be able to GET range hash of object from container"):
+        with allure.step(
+            "STORAGE node should be able to GET range hash of object from container"
+        ):
             assert can_get_range_hash_of_object(
                 wallet=storage_wallet.wallet_path,
                 cid=cid,
@@ -358,4 +371,3 @@ def test_basic_acl_ir_storage_rules_compliance(self, wallets: Wallets, public_co
             endpoint=endpoint,
             wallet_config=storage_wallet.config_path,
         )
-
diff --git a/pytest_tests/testsuites/acl/test_eacl.py b/pytest_tests/testsuites/acl/test_eacl.py
index b8f3911b1..614ef95bd 100644
--- a/pytest_tests/testsuites/acl/test_eacl.py
+++ b/pytest_tests/testsuites/acl/test_eacl.py
@@ -11,7 +11,8 @@
     EACLRule,
     create_eacl,
     set_eacl,
-    wait_for_cache_expired, get_eacl,
+    wait_for_cache_expired,
+    get_eacl,
 )
 from python_keywords.container import create_container
 from python_keywords.container_access import (
@@ -302,8 +303,12 @@ def test_deprecated_change_system_eacl(self, wallets, eacl_container_with_object
 
     @pytest.mark.trusted_party_proved
     @pytest.mark.system_eacl
-    @allure.title("Test case for verifying the impossible to change system extended ACL if eACL already set")
-    def test_deprecated_change_system_eacl_if_eacl_already_set(self, wallets, eacl_container_with_objects):
+    @allure.title(
+        "Test case for verifying the impossible to change system extended ACL if eACL already set"
+    )
+    def test_deprecated_change_system_eacl_if_eacl_already_set(
+        self, wallets, eacl_container_with_objects
+    ):
         user_wallet = wallets.get_wallet()
         cid, object_oids, file_path = eacl_container_with_objects
         endpoint = self.cluster.default_rpc_endpoint
@@ -489,7 +494,7 @@ def test_only_owner_can_set_eacl(
         self,
         wallets: Wallets,
         eacl_full_placement_container_with_object: tuple[str, str, str],
-        not_owner_wallet: str
+        not_owner_wallet: str,
     ):
         cid, oid, file_path = eacl_full_placement_container_with_object
 
diff --git a/pytest_tests/testsuites/conftest.py b/pytest_tests/testsuites/conftest.py
index 3183e9900..ac91e5554 100644
--- a/pytest_tests/testsuites/conftest.py
+++ b/pytest_tests/testsuites/conftest.py
@@ -66,6 +66,7 @@ def pytest_runtest_makereport(item, call):
     # be "setup", "call", "teardown"
     item.stash.setdefault(phase_report_key, {})[rep.when] = rep
 
+
 def pytest_collection_modifyitems(items):
     # Make network tests last based on @pytest.mark.node_mgmt
     def priority(item: pytest.Item) -> int:
@@ -167,7 +168,7 @@ def temp_directory() -> str:
 
 
 @pytest.fixture(scope="module", autouse=True)
-@allure.title(f'Prepare test files directories')
+@allure.title(f"Prepare test files directories")
 def artifacts_directory(temp_directory: str) -> None:
     dirs = [TEST_FILES_DIR, TEST_OBJECTS_DIR]
     for dir_name in dirs:
@@ -185,7 +186,7 @@ def artifacts_directory(temp_directory: str) -> None:
 @pytest.fixture(scope="session", autouse=True)
 @allure.title("Collect full logs")
 def collect_full_tests_logs(temp_directory, hosting: Hosting):
-    test_name = 'full_logs'
+    test_name = "full_logs"
     start_time = datetime.utcnow()
     yield
     end_time = datetime.utcnow()
@@ -199,15 +200,15 @@ def collect_full_tests_logs(temp_directory, hosting: Hosting):
 def collect_test_logs(request, temp_directory, hosting: Hosting):
     test_name = request.node.nodeid.translate(str.maketrans(":[]/", "____"))
     hash_suffix = hashlib.md5(test_name.encode()).hexdigest()
-    file_name = (test_name[:200] + '_' + hash_suffix)  # limit total length to 255
+    file_name = test_name[:200] + "_" + hash_suffix  # limit total length to 255
     logs_dir = os.path.join(temp_directory, "logs")
-    with allure.step(f'Start collecting logs for {file_name}'):
+    with allure.step(f"Start collecting logs for {file_name}"):
         start_time = datetime.utcnow()
     yield
     # https://docs.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures
     report = request.node.stash[phase_report_key]
     if report["setup"].failed or ("call" in report and report["call"].failed):
-        with allure.step(f'Stop collecting logs for {file_name}, logs path: {logs_dir} '):
+        with allure.step(f"Stop collecting logs for {file_name}, logs path: {logs_dir} "):
             end_time = datetime.utcnow()
             store_logs(hosting, logs_dir, file_name, start_time, end_time)
@@ -219,13 +220,13 @@ def netinfo(request, cluster: Cluster, client_shell: Shell, default_wallet: str)
     report = request.node.stash[phase_report_key]
     if report["setup"].failed or ("call" in report and report["call"].failed):
         for node in cluster.storage_nodes:
-            with allure.step(f'Checking netinfo for node {node}'):
+            with allure.step(f"Checking netinfo for node {node}"):
                 net_info = get_netmap_netinfo(
                     wallet=default_wallet,
                     endpoint=node.get_rpc_endpoint(),
                     shell=client_shell,
                 )
-                logger.info(f'Netinfo from {node}:\n{net_info}')
+                logger.info(f"Netinfo from {node}:\n{net_info}")
 
 
 @pytest.fixture(scope="session", autouse=True)
@@ -309,7 +310,7 @@ def background_grpc_load(client_shell: Shell, hosting: Hosting):
 @pytest.fixture(scope="function")
 @allure.title("Prepare not owner wallet and deposit")
 def not_owner_wallet(client_shell: Shell, temp_directory: str, cluster: Cluster) -> str:
-    wallet_path = create_wallet(client_shell, temp_directory, cluster, 'not_owner_wallet')
+    wallet_path = create_wallet(client_shell, temp_directory, cluster, "not_owner_wallet")
     yield wallet_path
     os.remove(wallet_path)
@@ -319,6 +320,7 @@ def not_owner_wallet(client_shell: Shell, temp_directory: str, cluster: Cluster)
 def default_wallet(client_shell: Shell, temp_directory: str, cluster: Cluster):
     return create_wallet(client_shell, temp_directory, cluster)
 
+
 @allure.title("Check logs for OOM and PANIC entries in {logs_dir}")
 def check_logs(logs_dir: str):
     problem_pattern = r"\Wpanic\W|\Woom\W|\Wtoo many open files\W"
@@ -341,7 +343,9 @@ def check_logs(logs_dir: str):
         raise pytest.fail(f"System logs {', '.join(logs_with_problem)} contain critical errors")
 
 
-def store_logs(hosting: Hosting, logs_dir: str, file_name: str, start_time: datetime, end_time: datetime) -> None:
+def store_logs(
+    hosting: Hosting, logs_dir: str, file_name: str, start_time: datetime, end_time: datetime
+) -> None:
     os.makedirs(logs_dir, exist_ok=True)
     dump_logs(hosting, logs_dir, start_time, end_time)
     attach_logs(logs_dir, os.path.join(os.getcwd(), ASSETS_DIR, file_name))
@@ -363,7 +367,7 @@ def attach_logs(logs_dir: str, test_name: str) -> None:
     # Zip all files and attach to Allure because it is more convenient to download a single
     # zip with all logs rather than mess with individual logs files per service or node
     logs_zip_file_path = shutil.make_archive(test_name, "zip", logs_dir)
-    allure.attach.file(logs_zip_file_path, name=f'{test_name}.zip', extension="zip")
+    allure.attach.file(logs_zip_file_path, name=f"{test_name}.zip", extension="zip")
 
 
 def create_dir(dir_path: str) -> None:
@@ -378,11 +382,13 @@ def remove_dir(dir_path: str) -> None:
 
 
 @allure.title("Prepare wallet and deposit")
-def create_wallet(client_shell: Shell, temp_directory: str, cluster: Cluster, name: Optional[str] = None) -> str:
+def create_wallet(
+    client_shell: Shell, temp_directory: str, cluster: Cluster, name: Optional[str] = None
+) -> str:
     if name is None:
-        wallet_name = f'{str(uuid.uuid4())}.json'
+        wallet_name = f"{str(uuid.uuid4())}.json"
     else:
-        wallet_name = f'{name}.json'
+        wallet_name = f"{name}.json"
 
     wallet_path = os.path.join(os.getcwd(), ASSETS_DIR, wallet_name)
     init_wallet(wallet_path, WALLET_PASS)
diff --git a/pytest_tests/testsuites/container/test_container.py b/pytest_tests/testsuites/container/test_container.py
index 1117914ca..48583ea61 100644
--- a/pytest_tests/testsuites/container/test_container.py
+++ b/pytest_tests/testsuites/container/test_container.py
@@ -91,9 +91,7 @@ def test_container_creation(self, default_wallet, name):
     @pytest.mark.trusted_party_proved
     @allure.title("Not owner and not trusted party can NOT delete container")
     def test_only_owner_can_delete_container(
-        self,
-        not_owner_wallet: WalletFile,
-        default_wallet: str
+        self, not_owner_wallet: WalletFile, default_wallet: str
     ):
         cid = create_container(
             wallet=default_wallet,
diff --git a/pytest_tests/testsuites/failovers/test_failover_network.py b/pytest_tests/testsuites/failovers/test_failover_network.py
index 19a5a03e6..8c4047339 100644
--- a/pytest_tests/testsuites/failovers/test_failover_network.py
+++ b/pytest_tests/testsuites/failovers/test_failover_network.py
@@ -6,7 +6,11 @@
 import pytest
 import subprocess
 from cluster import StorageNode
-from failover_utils import wait_all_storage_nodes_returned, wait_object_replication, get_morph_chain_endpoints
+from failover_utils import (
+    wait_all_storage_nodes_returned,
+    wait_object_replication,
+    get_morph_chain_endpoints,
+)
 from file_helper import generate_file, get_file_hash
 from iptables_helper import IpTablesHelper
 from python_keywords.container import create_container
@@ -124,20 +128,34 @@ def test_rpc_reconnection(self, default_wallet, hosting: Hosting):
         dport_repeat = 10  # Constant for the number of the disconnect should be repeated
         morph_chain_endpoints = get_morph_chain_endpoints(hosting)
 
-        required_keys = ['epoch', 'time_per_block', 'audit_fee', 'storage_price', 'container_fee', 'eigentrust_alpha',
-                         'number_of_eigentrust_iterations', 'epoch_duration', 'inner_ring_candidate_fee',
-                         'maximum_object_size', 'withdrawal_fee', 'systemdns', 'homomorphic_hashing_disabled',
-                         'maintenance_mode_allowed']
+        required_keys = [
+            "epoch",
+            "time_per_block",
+            "audit_fee",
+            "storage_price",
+            "container_fee",
+            "eigentrust_alpha",
+            "number_of_eigentrust_iterations",
+            "epoch_duration",
+            "inner_ring_candidate_fee",
+            "maximum_object_size",
+            "withdrawal_fee",
+            "systemdns",
+            "homomorphic_hashing_disabled",
+            "maintenance_mode_allowed",
+        ]
 
         for storage_node in hosting.find_service_configs(STORAGE_NODE_SERVICE_NAME_REGEX):
             host = hosting.get_host_by_service(storage_node.name)
             pid = host.get_service_pid(storage_node.name)
             for morph_chain_addr, morph_chain_port in morph_chain_endpoints:
-                with allure.step(f'Disconnecting storage node {storage_node.name} '
-                                 f'from {morph_chain_addr} {dport_repeat} times'):
+                with allure.step(
+                    f"Disconnecting storage node {storage_node.name} "
+                    f"from {morph_chain_addr} {dport_repeat} times"
+                ):
                     for repeat in range(dport_repeat):
-                        with allure.step(f'Disconnect number {repeat}'):
+                        with allure.step(f"Disconnect number {repeat}"):
                             try:
                                 """
                                 Of course, it would be cleaner to use such code:
@@ -147,38 +165,40 @@ def test_rpc_reconnection(self, default_wallet, hosting: Hosting):
                                 But we face the limitations of the ubuntu-latest runner:
                                 And using setfacl is not possible due to GitHub ubuntu-latest runner limitations.
                                 """
-                                command = f'ss -K dst {morph_chain_addr} dport {morph_chain_port}'
-                                sudo_command = f'sudo nsenter -t {pid} -n {command}'
+                                command = f"ss -K dst {morph_chain_addr} dport {morph_chain_port}"
+                                sudo_command = f"sudo nsenter -t {pid} -n {command}"
                                 output = subprocess.check_output(sudo_command, shell=True)
-                                logger.info(f'Output of the command {sudo_command}: {output}')
+                                logger.info(f"Output of the command {sudo_command}: {output}")
                             except subprocess.CalledProcessError as e:
                                 logger.error(
-                                    f'Error occurred while running command: {sudo_command}. Error message: {str(e)}')
+                                    f"Error occurred while running command: {sudo_command}. Error message: {str(e)}"
+                                )
                                 raise
                             finally:
                                 # Delay between shutdown attempts, emulates a real disconnection
                                 sleep(1)
                     logger.info(
-                        f'Disconnected storage node {storage_node.name} '
-                        f'from {morph_chain_addr} {dport_repeat} times')
+                        f"Disconnected storage node {storage_node.name} "
+                        f"from {morph_chain_addr} {dport_repeat} times"
+                    )
 
         for node in self.cluster.storage_nodes:
-            with allure.step(f'Checking if node {node} is alive'):
+            with allure.step(f"Checking if node {node} is alive"):
                 try:
                     health_check = storage_node_healthcheck(node)
                     assert (
-                            health_check.health_status == "READY"
-                            and health_check.network_status == "ONLINE"
+                        health_check.health_status == "READY"
+                        and health_check.network_status == "ONLINE"
                     )
                 except Exception as err:
-                    logger.warning(f'Node {node} is not online:\n{err}')
+                    logger.warning(f"Node {node} is not online:\n{err}")
                     raise AssertionError(
-                        f'After the RPC connection failed, the storage node {node} DID NOT reconnect '
-                        f'to any other node and FAILED to continue operating. '
+                        f"After the RPC connection failed, the storage node {node} DID NOT reconnect "
+                        f"to any other node and FAILED to continue operating. "
                     )
-            with allure.step(f'Checking netinfo for node {node}'):
+            with allure.step(f"Checking netinfo for node {node}"):
                 try:
                     net_info = get_netmap_netinfo(
                         wallet=default_wallet,
@@ -188,15 +208,16 @@ def test_rpc_reconnection(self, default_wallet, hosting: Hosting):
                     missing_keys = [key for key in required_keys if key not in net_info]
                     if missing_keys:
                         raise AssertionError(
-                            f'Error occurred while checking netinfo for node {node} - '
-                            f'missing keys in the output: {missing_keys}.'
-                            f'Netmap netinfo: {net_info}'
+                            f"Error occurred while checking netinfo for node {node} - "
+                            f"missing keys in the output: {missing_keys}."
+                            f"Netmap netinfo: {net_info}"
                         )
                 except Exception as err:
                     logger.warning(
-                        f'Error occurred while checking netinfo for node {node}. Error message: {str(err)}')
+                        f"Error occurred while checking netinfo for node {node}. Error message: {str(err)}"
+                    )
                     raise Exception(
-                        f'After the RPC connection failed, the storage node {node} cannot get netmap netinfo'
+                        f"After the RPC connection failed, the storage node {node} cannot get netmap netinfo"
                     )
-            logger.info(f'Node {node} is alive and online')
+            logger.info(f"Node {node} is alive and online")
diff --git a/pytest_tests/testsuites/failovers/test_failover_part.py b/pytest_tests/testsuites/failovers/test_failover_part.py
index 18b26bef6..8282daff0 100644
--- a/pytest_tests/testsuites/failovers/test_failover_part.py
+++ b/pytest_tests/testsuites/failovers/test_failover_part.py
@@ -41,9 +41,12 @@ def user_container(user_wallet: WalletFile, client_shell: Shell, cluster: Cluste
 class TestFailoverNodePart(ClusterTestBase):
     @allure.title("Enable resync metabase, delete metadata and get object")
     @pytest.mark.delete_metadata
-    def test_enable_resync_metabase_delete_metadata(self, enable_metabase_resync_on_start,
-                                                    user_container: StorageContainer,
-                                                    simple_object_size: int):
+    def test_enable_resync_metabase_delete_metadata(
+        self,
+        enable_metabase_resync_on_start,
+        user_container: StorageContainer,
+        simple_object_size: int,
+    ):
         storage_object = user_container.generate_object(simple_object_size)
 
         with allure.step("Delete metabase files from storage nodes"):
@@ -66,7 +69,9 @@ def test_enable_resync_metabase_delete_metadata(self, enable_metabase_resync_on_
             wallet_config=user_container.get_wallet_config_path(),
         )
 
-    @allure.title("Delete metadata without resync metabase enabling, delete metadata try to get object")
+    @allure.title(
+        "Delete metadata without resync metabase enabling, delete metadata try to get object"
+    )
     @pytest.mark.delete_metadata
     def test_delete_metadata(self, user_container: StorageContainer, simple_object_size: int):
         storage_object = user_container.generate_object(simple_object_size)
diff --git a/pytest_tests/testsuites/network/test_config_changes.py b/pytest_tests/testsuites/network/test_config_changes.py
index f2f7bdffc..489d0a489 100644
--- a/pytest_tests/testsuites/network/test_config_changes.py
+++ b/pytest_tests/testsuites/network/test_config_changes.py
@@ -67,15 +67,22 @@ def test_config_update_multiple_values(self, clean_config: None):
         ],
     )
     @allure.title("Set network config key to invalid value")
-    def test_config_set_invalid_value(self, key: str, value: Union[str, int, bool], expected_type: type):
-        with pytest.raises(RuntimeError, match=f"Error: invalid value for {key} key, "
-                                               f"expected {expected_type.__name__}, got '{str(value).lower()}'"):
+    def test_config_set_invalid_value(
+        self, key: str, value: Union[str, int, bool], expected_type: type
+    ):
+        with pytest.raises(
+            RuntimeError,
+            match=f"Error: invalid value for {key} key, "
+            f"expected {expected_type.__name__}, got '{str(value).lower()}'",
+        ):
             self._set_and_verify_config_keys(**{key: value})
 
     @allure.title("Set multiple network config keys to invalid values with force")
     def test_config_set_multiple_invalid_values(self):
-        with pytest.raises(RuntimeError, match="Error: invalid value for MaxObjectSize key, "
-                                               "expected int, got 'verybigsize'"):
+        with pytest.raises(
+            RuntimeError,
+            match="Error: invalid value for MaxObjectSize key, " "expected int, got 'verybigsize'",
+        ):
             self._set_and_verify_config_keys(
                 **{"MaxObjectSize": "VeryBigSize", "BasicIncomeRate": False}, force=True
             )
diff --git a/pytest_tests/testsuites/object/test_object_api.py b/pytest_tests/testsuites/object/test_object_api.py
index a7dd7ca0b..2c1fb5619 100755
--- a/pytest_tests/testsuites/object/test_object_api.py
+++ b/pytest_tests/testsuites/object/test_object_api.py
@@ -137,13 +137,14 @@ def storage_objects(
     with expect_not_raises():
         delete_objects(storage_objects, client_shell, cluster)
 
+
 @pytest.fixture
 def container(default_wallet: str, client_shell: Shell, cluster: Cluster) -> str:
-    cid = create_container(default_wallet, shell=client_shell, endpoint=cluster.default_rpc_endpoint)
-    yield cid
-    delete_container(
-        default_wallet, cid, shell=client_shell, endpoint=cluster.default_rpc_endpoint
+    cid = create_container(
+        default_wallet, shell=client_shell, endpoint=cluster.default_rpc_endpoint
     )
+    yield cid
+    delete_container(default_wallet, cid, shell=client_shell, endpoint=cluster.default_rpc_endpoint)
 
 
 @pytest.mark.grpc_api
@@ -244,21 +245,14 @@ def test_search_object_api(
         cid = storage_objects[0].cid
 
         def _generate_filters_expressions(attrib_dict: dict[str, str]):
-            return [f"{filter_key} EQ {filter_val}" for filter_key, filter_val in attrib_dict.items()]
+            return [
+                f"{filter_key} EQ {filter_val}" for filter_key, filter_val in attrib_dict.items()
+            ]
 
         test_table = [
-            (
-                _generate_filters_expressions(OBJECT_ATTRIBUTES[1]),
-                oids[1:2]
-            ),
-            (
-                _generate_filters_expressions(OBJECT_ATTRIBUTES[2]),
-                oids[2:3]
-            ),
-            (
-                _generate_filters_expressions(COMMON_ATTRIBUTE),
-                oids[1:3]
-            ),
+            (_generate_filters_expressions(OBJECT_ATTRIBUTES[1]), oids[1:2]),
+            (_generate_filters_expressions(OBJECT_ATTRIBUTES[2]), oids[2:3]),
+            (_generate_filters_expressions(COMMON_ATTRIBUTE), oids[1:3]),
         ]
 
         with allure.step("Search objects"):
@@ -360,7 +354,9 @@ def test_object_search_should_return_tombstone_items(
         ), f"Object wasn't deleted properly. Found object {tombstone_oid} with type {object_type}"
 
     @allure.title("Validate objects search by common prefix")
-    def test_search_object_api_common_prefix(self, default_wallet: str, simple_object_size: int, container: str):
+    def test_search_object_api_common_prefix(
+        self, default_wallet: str, simple_object_size: int, container: str
+    ):
         FILEPATH_ATTR_NAME = "FilePath"
         NUMBER_OF_OBJECTS = 5
         wallet = default_wallet
@@ -376,15 +372,15 @@ def test_search_object_api_common_prefix(self, default_wallet: str, simple_objec
                 cid=container,
                 shell=self.shell,
                 cluster=self.cluster,
-                attributes={FILEPATH_ATTR_NAME: file_path}
+                attributes={FILEPATH_ATTR_NAME: file_path},
             )
 
         all_oids = sorted(objects.values())
         for common_prefix, expected_oids in (
-            ('/', all_oids),
+            ("/", all_oids),
             (os.path.join(os.getcwd(), ASSETS_DIR), all_oids),
             (os.path.join(os.getcwd(), ASSETS_DIR, TEST_FILES_DIR), all_oids),
-            (file_path, [objects[file_path]])
+            (file_path, [objects[file_path]]),
         ):
             with allure.step(f"Search objects by path: {common_prefix}"):
                 search_object(
@@ -395,15 +391,10 @@ def test_search_object_api_common_prefix(self, default_wallet: str, simple_objec
                     filters=[f"{FILEPATH_ATTR_NAME} COMMON_PREFIX {common_prefix}"],
                     expected_objects_list=expected_oids,
                     root=True,
-                    fail_on_assert=True
+                    fail_on_assert=True,
                 )
-
-        for common_prefix in (
-            f"{file_path}/o123"
-            '/COMMON_PREFIX',
-            '?',
-            '213'
-        ):
+
+        for common_prefix in (f"{file_path}/o123" "/COMMON_PREFIX", "?", "213"):
             with allure.step(f"Search objects by path: {common_prefix}"):
                 with pytest.raises(AssertionError):
 
                     search_object(
                         wallet,
                         cid=container,
                         shell=self.shell,
                         endpoint=self.cluster.default_rpc_endpoint,
                         filters=[f"{FILEPATH_ATTR_NAME} COMMON_PREFIX {common_prefix}"],
                         expected_objects_list=expected_oids,
                         root=True,
-                        fail_on_assert=True
+                        fail_on_assert=True,
                     )
 
     @allure.title("Validate native object API get_range_hash")
diff --git a/pytest_tests/testsuites/object/test_object_lock.py b/pytest_tests/testsuites/object/test_object_lock.py
index e38b0f288..1bfd6f0a1 100755
--- a/pytest_tests/testsuites/object/test_object_lock.py
+++ b/pytest_tests/testsuites/object/test_object_lock.py
@@ -8,7 +8,11 @@
 from common import STORAGE_GC_TIME
 from complex_object_actions import get_link_object, get_storage_object_chunks
 from epoch import ensure_fresh_epoch, get_epoch, tick_epoch, tick_epoch_and_wait
-from failover_utils import wait_all_storage_nodes_returned, enable_metabase_resync_on_start, docker_compose_restart_storage_nodes
+from failover_utils import (
+    wait_all_storage_nodes_returned,
+    enable_metabase_resync_on_start,
+    docker_compose_restart_storage_nodes,
+)
 from grpc_responses import (
     LIFETIME_REQUIRED,
     LOCK_NON_REGULAR_OBJECT,
@@ -19,7 +23,12 @@
     OBJECT_NOT_FOUND,
 )
 from neofs_testlib.shell import Shell
-from node_management import drop_object, delete_node_metadata, stop_storage_nodes, start_storage_nodes
+from node_management import (
+    drop_object,
+    delete_node_metadata,
+    stop_storage_nodes,
+    start_storage_nodes,
+)
 from pytest import FixtureRequest
 from python_keywords.container import create_container
 from python_keywords.neofs_verbs import delete_object, head_object, lock_object
@@ -687,8 +696,10 @@ def test_link_object_of_complex_object_should_also_be_protected_from_deletion(
         )
 
     @pytest.mark.delete_metadata
-    @allure.title("The locked object must be protected from deletion after metabase deletion "
-                  "(metabase resynchronization must be enabled), and after restarting storage nodes")
+    @allure.title(
+        "The locked object must be protected from deletion after metabase deletion "
+        "(metabase resynchronization must be enabled), and after restarting storage nodes"
+    )
     @pytest.mark.parametrize(
         "new_locked_storage_object",
         [pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
@@ -696,9 +707,9 @@ def test_link_object_of_complex_object_should_also_be_protected_from_deletion(
         indirect=True,
     )
     def test_the_object_lock_should_be_kept_after_metabase_deletion(
-            self,
-            new_locked_storage_object: StorageObjectInfo,
-            enable_metabase_resync_on_start,
+        self,
+        new_locked_storage_object: StorageObjectInfo,
+        enable_metabase_resync_on_start,
     ):
         """
         Lock objects should fill metabase on resync_metabase
@@ -711,7 +722,9 @@ def test_the_object_lock_should_be_kept_after_metabase_deletion(
             nodes=self.cluster.storage_nodes,
         )
 
-        with allure.step(f"Try to delete object {new_locked_storage_object.oid} before metabase deletion"):
+        with allure.step(
+            f"Try to delete object {new_locked_storage_object.oid} before metabase deletion"
+        ):
             with pytest.raises(Exception, match=OBJECT_IS_LOCKED):
                 delete_object(
                     new_locked_storage_object.wallet_file_path,
@@ -749,7 +762,9 @@ def test_the_object_lock_should_be_kept_after_metabase_deletion(
 
         assert nodes_with_object_before_first_try == nodes_with_object_after_metabase_deletion
 
-        with allure.step(f"Try to delete object {new_locked_storage_object.oid} after metabase deletion"):
+        with allure.step(
+            f"Try to delete object {new_locked_storage_object.oid} after metabase deletion"
+        ):
             with pytest.raises(Exception, match=OBJECT_IS_LOCKED):
                 delete_object(
                     new_locked_storage_object.wallet_file_path,
diff --git a/pytest_tests/testsuites/services/http_gate/test_http_bearer.py b/pytest_tests/testsuites/services/http_gate/test_http_bearer.py
index e0565ffd1..6c067b69f 100644
--- a/pytest_tests/testsuites/services/http_gate/test_http_bearer.py
+++ b/pytest_tests/testsuites/services/http_gate/test_http_bearer.py
@@ -95,7 +95,7 @@ def test_unable_put_without_bearer_token(
             error_pattern="access to object operation denied",
         )
 
-    @pytest.mark.parametrize("bearer_type", ('header', 'cookie'))
+    @pytest.mark.parametrize("bearer_type", ("header", "cookie"))
     @pytest.mark.parametrize(
         "object_size",
         [pytest.lazy_fixture("simple_object_size"), pytest.lazy_fixture("complex_object_size")],
@@ -117,17 +117,17 @@ def test_put_with_bearer_when_eacl_restrict(
     ):
         headers = None
         cookies = None
-        if bearer_type == 'header':
+        if bearer_type == "header":
             headers = [f" -H 'Authorization: Bearer {bearer}'"]
-        if bearer_type == 'cookie':
-            cookies = {'Bearer': bearer}
-
+        if bearer_type == "cookie":
+            cookies = {"Bearer": bearer}
+
         oid = upload_via_http_gate_curl(
             cid=user_container,
             filepath=file_path,
             endpoint=self.cluster.default_http_gate_endpoint,
             headers=headers,
-            cookies=cookies
+            cookies=cookies,
         )
         get_object_and_verify_hashes(
             oid=oid,
diff --git a/pytest_tests/testsuites/services/http_gate/test_http_gate.py b/pytest_tests/testsuites/services/http_gate/test_http_gate.py
index 281821f73..174c50488 100644
--- a/pytest_tests/testsuites/services/http_gate/test_http_gate.py
+++ b/pytest_tests/testsuites/services/http_gate/test_http_gate.py
@@ -119,13 +119,13 @@ def test_put_http_get_http_content_disposition(self, simple_object_size):
         )
         resp = get_via_http_gate(
             cid=cid,
-            oid=oid,
-            endpoint=self.cluster.default_http_gate_endpoint,
-            return_response=True
+            oid=oid,
+            endpoint=self.cluster.default_http_gate_endpoint,
+            return_response=True,
         )
-        content_disposition_type, filename = resp.headers['Content-Disposition'].split(';')
-        assert content_disposition_type.strip() == 'inline'
-        assert filename.strip().split('=')[1] == file_path.split('/')[-1]
+        content_disposition_type, filename = resp.headers["Content-Disposition"].split(";")
+        assert content_disposition_type.strip() == "inline"
+        assert filename.strip().split("=")[1] == file_path.split("/")[-1]
 
         with allure.step("Verify Content-Disposition with download=true"):
             file_path = generate_file(simple_object_size)
@@ -137,14 +137,14 @@ def test_put_http_get_http_content_disposition(self, simple_object_size):
             )
             resp = get_via_http_gate(
                 cid=cid,
-                oid=oid,
-                endpoint=self.cluster.default_http_gate_endpoint,
+                oid=oid,
+                endpoint=self.cluster.default_http_gate_endpoint,
                 return_response=True,
-                download=True
+                download=True,
             )
-            content_disposition_type, filename = resp.headers['Content-Disposition'].split(';')
-            assert content_disposition_type.strip() == 'attachment'
-            assert filename.strip().split('=')[1] == file_path.split('/')[-1]
+            content_disposition_type, filename = resp.headers["Content-Disposition"].split(";")
+            assert content_disposition_type.strip() == "attachment"
+            assert filename.strip().split("=")[1] == file_path.split("/")[-1]
 
     @allure.title("Verify Content-Type if uploaded without any Content-Type specified")
     def test_put_http_get_http_without_content_type(self, simple_object_size):
@@ -165,9 +165,14 @@ def test_put_http_get_http_without_content_type(self, simple_object_size):
                 endpoint=self.cluster.default_http_gate_endpoint,
             )
 
-            resp = get_via_http_gate(cid=cid, oid=oid, endpoint=self.cluster.default_http_gate_endpoint, return_response=True)
-            assert resp.headers['Content-Type'] == 'application/octet-stream'
-
+            resp = get_via_http_gate(
+                cid=cid,
+                oid=oid,
+                endpoint=self.cluster.default_http_gate_endpoint,
+                return_response=True,
+            )
+            assert resp.headers["Content-Type"] == "application/octet-stream"
+
         with allure.step("Upload text object"):
             file_path = generate_file_with_content(simple_object_size, content="123")
 
@@ -177,8 +182,13 @@ def test_put_http_get_http_without_content_type(self, simple_object_size):
                 endpoint=self.cluster.default_http_gate_endpoint,
             )
 
-            resp = get_via_http_gate(cid=cid, oid=oid, endpoint=self.cluster.default_http_gate_endpoint, return_response=True)
-            assert resp.headers['Content-Type'] == 'text/plain; charset=utf-8'
+            resp = get_via_http_gate(
+                cid=cid,
+                oid=oid,
+                endpoint=self.cluster.default_http_gate_endpoint,
+                return_response=True,
+            )
+            assert resp.headers["Content-Type"] == "text/plain; charset=utf-8"
 
     @allure.title("Verify Content-Type if uploaded with X-Attribute-Content-Type")
     def test_put_http_get_http_with_x_atribute_content_type(self, simple_object_size):
@@ -201,8 +211,13 @@ def test_put_http_get_http_with_x_atribute_content_type(self, simple_object_size
                 endpoint=self.cluster.default_http_gate_endpoint,
             )
 
-            resp = get_via_http_gate(cid=cid, oid=oid, endpoint=self.cluster.default_http_gate_endpoint, return_response=True)
-            assert resp.headers['Content-Type'] == 'CoolContentType'
+            resp = get_via_http_gate(
+                cid=cid,
+                oid=oid,
+                endpoint=self.cluster.default_http_gate_endpoint,
+                return_response=True,
+            )
+            assert resp.headers["Content-Type"] == "CoolContentType"
 
     @allure.title("Verify Content-Type if uploaded with multipart Content-Type")
     def test_put_http_get_http_with_multipart_content_type(self):
@@ -213,19 +228,24 @@ def test_put_http_get_http_with_multipart_content_type(self):
             rule=self.PLACEMENT_RULE_2,
             basic_acl=PUBLIC_ACL,
         )
-
+
         with allure.step("Upload object with multipart content type"):
-            file_path = generate_file_with_content(0, content='123')
+            file_path = generate_file_with_content(0, content="123")
 
             oid = upload_via_http_gate(
                 cid=cid,
                 path=file_path,
                 endpoint=self.cluster.default_http_gate_endpoint,
-                file_content_type='application/json'
+                file_content_type="application/json",
             )
 
-            resp = get_via_http_gate(cid=cid, oid=oid, endpoint=self.cluster.default_http_gate_endpoint, return_response=True)
-            assert resp.headers['Content-Type'] == 'application/json'
+            resp = get_via_http_gate(
+                cid=cid,
+                oid=oid,
+                endpoint=self.cluster.default_http_gate_endpoint,
+                return_response=True,
+            )
+            assert resp.headers["Content-Type"] == "application/json"
 
     @allure.title("Verify special HTTP headers")
     def test_put_http_get_http_special_attributes(self, simple_object_size, cluster: Cluster):
@@ -245,17 +265,14 @@ def test_put_http_get_http_special_attributes(self, simple_object_size, cluster:
             endpoint=self.cluster.default_http_gate_endpoint,
         )
         resp = get_via_http_gate(
-            cid=cid,
-            oid=oid,
-            endpoint=self.cluster.default_http_gate_endpoint,
-            return_response=True
+            cid=cid, oid=oid, endpoint=self.cluster.default_http_gate_endpoint, return_response=True
         )
         with open(cluster.http_gates[0].get_wallet_path()) as wallet_file:
             wallet_json = json.load(wallet_file)
 
-        assert resp.headers['X-Owner-Id'] == wallet_json['accounts'][0]['address']
-        assert resp.headers['X-Object-Id'] == oid
-        assert resp.headers['X-Container-Id'] == cid
+        assert resp.headers["X-Owner-Id"] == wallet_json["accounts"][0]["address"]
+        assert resp.headers["X-Object-Id"] == oid
+        assert resp.headers["X-Container-Id"] == cid
 
     @allure.link("https://github.com/nspcc-dev/neofs-http-gw#uploading", name="uploading")
name="downloading") diff --git a/pytest_tests/testsuites/session_token/test_static_session_token_container.py b/pytest_tests/testsuites/session_token/test_static_session_token_container.py index 20bd72b0c..621b0c662 100644 --- a/pytest_tests/testsuites/session_token/test_static_session_token_container.py +++ b/pytest_tests/testsuites/session_token/test_static_session_token_container.py @@ -155,14 +155,14 @@ def test_static_session_token_container_delete( @pytest.mark.trusted_party_proved @allure.title("Not owner user can NOT delete container") def test_not_owner_user_can_not_delete_container( - self, - owner_wallet: WalletFile, - user_wallet: WalletFile, - stranger_wallet: WalletFile, - scammer_wallet: WalletFile, - static_sessions: dict[ContainerVerb, str], - temp_directory: str, - not_owner_wallet, + self, + owner_wallet: WalletFile, + user_wallet: WalletFile, + stranger_wallet: WalletFile, + scammer_wallet: WalletFile, + static_sessions: dict[ContainerVerb, str], + temp_directory: str, + not_owner_wallet, ): with allure.step("Create container"): cid = create_container( @@ -171,8 +171,12 @@ def test_not_owner_user_can_not_delete_container( endpoint=self.cluster.default_rpc_endpoint, ) - user_token = self.static_session_token(owner_wallet, user_wallet, self.shell, temp_directory) - stranger_token = self.static_session_token(user_wallet, stranger_wallet, self.shell, temp_directory) + user_token = self.static_session_token( + owner_wallet, user_wallet, self.shell, temp_directory + ) + stranger_token = self.static_session_token( + user_wallet, stranger_wallet, self.shell, temp_directory + ) with allure.step("Try to delete container using stranger token"): with pytest.raises(RuntimeError, match=NOT_SESSION_CONTAINER_OWNER): @@ -200,14 +204,14 @@ def test_not_owner_user_can_not_delete_container( @pytest.mark.trusted_party_proved @allure.title("Not trusted party user can NOT delete container") def test_not_trusted_party_user_can_not_delete_container( - self, - owner_wallet: WalletFile, - user_wallet: WalletFile, - stranger_wallet: WalletFile, - scammer_wallet: WalletFile, - static_sessions: dict[ContainerVerb, str], - temp_directory: str, - not_owner_wallet, + self, + owner_wallet: WalletFile, + user_wallet: WalletFile, + stranger_wallet: WalletFile, + scammer_wallet: WalletFile, + static_sessions: dict[ContainerVerb, str], + temp_directory: str, + not_owner_wallet, ): with allure.step("Create container"): cid = create_container( @@ -216,8 +220,12 @@ def test_not_trusted_party_user_can_not_delete_container( endpoint=self.cluster.default_rpc_endpoint, ) - user_token = self.static_session_token(owner_wallet, user_wallet, self.shell, temp_directory) - stranger_token = self.static_session_token(user_wallet, stranger_wallet, self.shell, temp_directory) + user_token = self.static_session_token( + owner_wallet, user_wallet, self.shell, temp_directory + ) + stranger_token = self.static_session_token( + user_wallet, stranger_wallet, self.shell, temp_directory + ) with allure.step("Try to delete container using scammer token"): with pytest.raises(RuntimeError, match=CONTAINER_DELETION_TIMED_OUT): @@ -263,7 +271,6 @@ def test_not_trusted_party_user_can_not_delete_container( force=True, ) - def test_static_session_token_container_set_eacl( self, owner_wallet: WalletFile, @@ -305,14 +312,14 @@ def test_static_session_token_container_set_eacl( @pytest.mark.trusted_party_proved @allure.title("Not owner and not trusted party can NOT set eacl") def 
     def test_static_session_token_container_set_eacl_only_trusted_party_proved_by_the_container_owner(
-            self,
-            owner_wallet: WalletFile,
-            user_wallet: WalletFile,
-            stranger_wallet: WalletFile,
-            scammer_wallet: WalletFile,
-            static_sessions: dict[ContainerVerb, str],
-            temp_directory: str,
-            not_owner_wallet,
+        self,
+        owner_wallet: WalletFile,
+        user_wallet: WalletFile,
+        stranger_wallet: WalletFile,
+        scammer_wallet: WalletFile,
+        static_sessions: dict[ContainerVerb, str],
+        temp_directory: str,
+        not_owner_wallet,
     ):
         with allure.step("Create container"):
             cid = create_container(
@@ -322,8 +329,12 @@ def test_static_session_token_container_set_eacl_only_trusted_party_proved_by_th
                 endpoint=self.cluster.default_rpc_endpoint,
             )
 
-        user_token = self.static_session_token(owner_wallet, user_wallet, self.shell, temp_directory)
-        stranger_token = self.static_session_token(user_wallet, stranger_wallet, self.shell, temp_directory)
+        user_token = self.static_session_token(
+            owner_wallet, user_wallet, self.shell, temp_directory
+        )
+        stranger_token = self.static_session_token(
+            user_wallet, stranger_wallet, self.shell, temp_directory
+        )
 
         new_eacl = [
             EACLRule(access=EACLAccess.DENY, role=EACLRole.OTHERS, operation=op)
diff --git a/pytest_tests/testsuites/shard/test_control_shard.py b/pytest_tests/testsuites/shard/test_control_shard.py
index 6ea4ee07b..d33059af0 100644
--- a/pytest_tests/testsuites/shard/test_control_shard.py
+++ b/pytest_tests/testsuites/shard/test_control_shard.py
@@ -90,7 +90,9 @@ def from_object(shard):
             writecache = shard["writecache"]["path"] if shard["writecache"]["enabled"] else ""
         else:
             writecache = (
-                shard["writecache"]["path"] if "path" in shard["writecache"] else shard["writecache"]
+                shard["writecache"]["path"]
+                if "path" in shard["writecache"]
+                else shard["writecache"]
             )
 
         return Shard(
diff --git a/robot/resources/lib/python_keywords/acl.py b/robot/resources/lib/python_keywords/acl.py
index 9a5941239..33358abb5 100644
--- a/robot/resources/lib/python_keywords/acl.py
+++ b/robot/resources/lib/python_keywords/acl.py
@@ -157,7 +157,9 @@ def _encode_cid_for_eacl(cid: str) -> str:
 
 def create_eacl(cid: str, rules_list: List[EACLRule], shell: Shell) -> str:
-    table_file_path = os.path.join(os.getcwd(), ASSETS_DIR, TEST_FILES_DIR, f"eacl_table_{str(uuid.uuid4())}.json")
+    table_file_path = os.path.join(
+        os.getcwd(), ASSETS_DIR, TEST_FILES_DIR, f"eacl_table_{str(uuid.uuid4())}.json"
+    )
     cli = NeofsCli(shell, NEOFS_CLI_EXEC, WALLET_CONFIG)
     cli.acl.extended_create(cid=cid, out=table_file_path, rule=rules_list)
 
diff --git a/robot/resources/lib/python_keywords/epoch.py b/robot/resources/lib/python_keywords/epoch.py
index b55c569ec..aed038e73 100644
--- a/robot/resources/lib/python_keywords/epoch.py
+++ b/robot/resources/lib/python_keywords/epoch.py
@@ -36,12 +36,14 @@ def ensure_fresh_epoch(
 
 @allure.step("Wait for epochs align in whole cluster")
 @wait_for_success(60, 5)
-def wait_for_epochs_align(shell: Shell, cluster: Cluster, epoch_number: Optional[int] = None) -> bool:
+def wait_for_epochs_align(
+    shell: Shell, cluster: Cluster, epoch_number: Optional[int] = None
+) -> bool:
     epochs = []
     for node in cluster.storage_nodes:
         current_epoch = get_epoch(shell, cluster, node)
         assert (
-                epoch_number is None or current_epoch > epoch_number
+            epoch_number is None or current_epoch > epoch_number
         ), f"Epoch {current_epoch} wasn't ticked yet. Expected epoch > {epoch_number}"
         epochs.append(current_epoch)
     unique_epochs = list(set(epochs))
@@ -90,7 +92,7 @@ def tick_epoch(shell: Shell, cluster: Cluster, alive_node: Optional[StorageNode]
             neofs_adm_exec_path=NEOFS_ADM_EXEC,
             config_file=NEOFS_ADM_CONFIG_PATH,
         )
-
+
         neofsadm.morph.force_new_epoch(
             rpc_endpoint=morph_endpoint,
             alphabet_wallets="/".join(ir_wallet_path.split("/")[:-1]),
@@ -120,8 +122,12 @@ def tick_epoch(shell: Shell, cluster: Cluster, alive_node: Optional[StorageNode]
 
 @allure.step("Tick Epoch and wait for epochs align")
-def tick_epoch_and_wait(shell: Shell, cluster: Cluster, current_epoch: Optional[int] = None,
-                        node: Optional[StorageNode] = None):
+def tick_epoch_and_wait(
+    shell: Shell,
+    cluster: Cluster,
+    current_epoch: Optional[int] = None,
+    node: Optional[StorageNode] = None,
+):
     current_epoch = current_epoch if current_epoch else get_epoch(shell, cluster, node)
     tick_epoch(shell, cluster, node)
     wait_for_epochs_align(shell, cluster, current_epoch)
diff --git a/robot/resources/lib/python_keywords/failover_utils.py b/robot/resources/lib/python_keywords/failover_utils.py
index 0dcf3493d..624bfb386 100644
--- a/robot/resources/lib/python_keywords/failover_utils.py
+++ b/robot/resources/lib/python_keywords/failover_utils.py
@@ -15,8 +15,13 @@
 from neofs_testlib.hosting import Hosting
 from python_keywords.node_management import storage_node_healthcheck, stop_storage_nodes
 from storage_policy import get_nodes_with_object
-from common import MORPH_CHAIN_SERVICE_NAME_REGEX, ENDPOINT_INTERNAL0, DOCKER_COMPOSE_ENV_FILE, \
-    DOCKER_COMPOSE_STORAGE_CONFIG_FILE, METABASE_RESYNC_TIMEOUT
+from common import (
+    MORPH_CHAIN_SERVICE_NAME_REGEX,
+    ENDPOINT_INTERNAL0,
+    DOCKER_COMPOSE_ENV_FILE,
+    DOCKER_COMPOSE_STORAGE_CONFIG_FILE,
+    METABASE_RESYNC_TIMEOUT,
+)
 
 logger = logging.getLogger("NeoLogger")
 
@@ -71,7 +76,9 @@ def get_morph_chain_endpoints(hosting: Hosting) -> List[Tuple[str, str]]:
     endpoints = []
     for config in morph_chain_config:
         if ENDPOINT_INTERNAL0 not in config.attributes:
-            raise ValueError(f"{ENDPOINT_INTERNAL0} is not present in the attributes of the config: {config}")
+            raise ValueError(
+                f"{ENDPOINT_INTERNAL0} is not present in the attributes of the config: {config}"
+            )
         morph_chain_addr_full = config.attributes[ENDPOINT_INTERNAL0]
         parsed_url = urlparse(morph_chain_addr_full)
         addr = parsed_url.hostname
@@ -91,8 +98,16 @@ def docker_compose_restart_storage_nodes(cluster: Cluster):
     wait_all_storage_nodes_returned(cluster)
     with allure.step("Log resync status"):
         for node in cluster.storage_nodes:
-            envs = subprocess.run(["docker", "inspect", "-f", "'{{range $index, $value := .Config.Env}}{{$value}} "
-                                   "{{end}}'", node.name], capture_output=True)
+            envs = subprocess.run(
+                [
+                    "docker",
+                    "inspect",
+                    "-f",
+                    "'{{range $index, $value := .Config.Env}}{{$value}} " "{{end}}'",
+                    node.name,
+                ],
+                capture_output=True,
+            )
             env_stdout = envs.stdout.decode("utf-8")
             logger.debug(f"ENV from {node.name}: {env_stdout}")
 
@@ -114,15 +129,15 @@ def enable_metabase_resync_on_start(cluster: Cluster):
     """
     file_path = DOCKER_COMPOSE_ENV_FILE
     if not os.path.exists(file_path):
-        pytest.fail(f'File {file_path} does not exist!')
+        pytest.fail(f"File {file_path} does not exist!")
 
-    with open(file_path, 'r') as file:
+    with open(file_path, "r") as file:
         lines = file.readlines()
 
     logger.debug(f"Initial file content:\n{''.join(lines)}")
 
     replacements = {
-        'NEOFS_STORAGE_SHARD_0_RESYNC_METABASE=false': 'NEOFS_STORAGE_SHARD_0_RESYNC_METABASE=true\n',
-        'NEOFS_STORAGE_SHARD_1_RESYNC_METABASE=false': 'NEOFS_STORAGE_SHARD_1_RESYNC_METABASE=true\n'
+        "NEOFS_STORAGE_SHARD_0_RESYNC_METABASE=false": "NEOFS_STORAGE_SHARD_0_RESYNC_METABASE=true\n",
+        "NEOFS_STORAGE_SHARD_1_RESYNC_METABASE=false": "NEOFS_STORAGE_SHARD_1_RESYNC_METABASE=true\n",
     }
 
     unprocessed_lines = set(replacements.values())
@@ -138,9 +153,9 @@ def enable_metabase_resync_on_start(cluster: Cluster):
 
     modified_lines.extend(unprocessed_lines)
 
-    modified_content = ''.join(modified_lines)
+    modified_content = "".join(modified_lines)
 
-    with open(file_path, 'w') as file:
+    with open(file_path, "w") as file:
         file.write(modified_content)
 
     logger.debug(f"Modified file content:\n{modified_content}")
@@ -149,7 +164,7 @@ def enable_metabase_resync_on_start(cluster: Cluster):
 
     yield
 
-    with open(file_path, 'w') as file:
+    with open(file_path, "w") as file:
         file.writelines(lines)
 
     logger.debug(f"Restored file content:\n{''.join(lines)}")
@@ -158,4 +173,3 @@ def enable_metabase_resync_on_start(cluster: Cluster):
 
     with allure.step(f"Waiting {METABASE_RESYNC_TIMEOUT} seconds for the metabase to synchronize"):
         sleep(parse_time(METABASE_RESYNC_TIMEOUT))
-
diff --git a/robot/resources/lib/python_keywords/http_gate.py b/robot/resources/lib/python_keywords/http_gate.py
index 4f22f386a..07cbbcbb7 100644
--- a/robot/resources/lib/python_keywords/http_gate.py
+++ b/robot/resources/lib/python_keywords/http_gate.py
@@ -27,12 +27,12 @@
 
 @allure.step("Get via HTTP Gate")
 def get_via_http_gate(
-        cid: str,
-        oid: str,
-        endpoint: str,
-        request_path: Optional[str] = None,
-        return_response = False,
-        download = False
+    cid: str,
+    oid: str,
+    endpoint: str,
+    request_path: Optional[str] = None,
+    return_response=False,
+    download=False,
 ) -> Union[str, requests.Response]:
     """
     This function gets given object from HTTP gate
@@ -46,7 +46,7 @@ def get_via_http_gate(
     # if `request_path` parameter ommited, use default
     download_attribute = ""
     if download:
-        download_attribute="?download=true"
+        download_attribute = "?download=true"
     if request_path is None:
         request = f"{endpoint}/get/{cid}/{oid}{download_attribute}"
     else:
@@ -145,11 +145,7 @@ def get_via_http_gate_by_attribute(
 
 @allure.step("Upload via HTTP Gate")
 def upload_via_http_gate(
-        cid: str,
-        path: str,
-        endpoint: str,
-        headers: dict = None,
-        file_content_type: str = None
+    cid: str, path: str, endpoint: str, headers: dict = None, file_content_type: str = None
 ) -> str:
     """
     This function upload given object through HTTP gate
diff --git a/robot/resources/lib/python_keywords/neofs_verbs.py b/robot/resources/lib/python_keywords/neofs_verbs.py
index a62ce0da8..dde68dab7 100644
--- a/robot/resources/lib/python_keywords/neofs_verbs.py
+++ b/robot/resources/lib/python_keywords/neofs_verbs.py
@@ -215,7 +215,7 @@ def put_object_to_random_node(
         expire_at,
         no_progress,
         session,
-        lifetime
+        lifetime,
     )
 
 
@@ -444,7 +444,7 @@ def search_object(
     session: Optional[str] = None,
     phy: bool = False,
     root: bool = False,
-    fail_on_assert = False
+    fail_on_assert=False,
 ) -> list:
     """
     SEARCH an Object.
diff --git a/robot/resources/lib/python_keywords/object_access.py b/robot/resources/lib/python_keywords/object_access.py
index 33e3fe52a..1b0f0f4ff 100644
--- a/robot/resources/lib/python_keywords/object_access.py
+++ b/robot/resources/lib/python_keywords/object_access.py
@@ -145,8 +145,8 @@ def can_get_head_object(
 
 def _generate_random_range_cut(offset: int = 0, length: int = 10):
     # [X:0] requests are not allowed
-    offset = random.randint(offset, length-1)
-    length = length - random.randint(offset, length-1)
+    offset = random.randint(offset, length - 1)
+    length = length - random.randint(offset, length - 1)
     return f"{offset}:{length}"
 
 
diff --git a/robot/variables/common.py b/robot/variables/common.py
index ac3166b6d..7630eca42 100644
--- a/robot/variables/common.py
+++ b/robot/variables/common.py
@@ -26,10 +26,13 @@
 TEST_FILES_DIR = os.getenv("TEST_FILES_DIR", "TestFilesDir")
 TEST_OBJECTS_DIR = os.getenv("TEST_OBJECTS_DIR", "TestObjectsDir")
 DEVENV_PATH = os.getenv("DEVENV_PATH", os.path.join("..", "neofs-dev-env"))
-DOCKER_COMPOSE_STORAGE_CONFIG_FILE = os.getenv("DOCKER_COMPOSE_STORAGE_CONFIG_FILE", os.path.join(DEVENV_PATH, "services", "storage",
-                                               "docker-compose.yml"))
-DOCKER_COMPOSE_ENV_FILE = os.getenv("DOCKER_COMPOSE_ENV_FILE", os.path.join(DEVENV_PATH, "services", "storage",
-                                    ".int_test.env"))
+DOCKER_COMPOSE_STORAGE_CONFIG_FILE = os.getenv(
+    "DOCKER_COMPOSE_STORAGE_CONFIG_FILE",
+    os.path.join(DEVENV_PATH, "services", "storage", "docker-compose.yml"),
+)
+DOCKER_COMPOSE_ENV_FILE = os.getenv(
+    "DOCKER_COMPOSE_ENV_FILE", os.path.join(DEVENV_PATH, "services", "storage", ".int_test.env")
+)
 
 # Password of wallet owned by user on behalf of whom we are running tests
 WALLET_PASS = os.getenv("WALLET_PASS", "")
@@ -41,7 +44,9 @@
 NEOFS_AUTHMATE_EXEC = os.getenv("NEOFS_AUTHMATE_EXEC", "neofs-s3-authmate")
 NEOFS_ADM_EXEC = os.getenv("NEOFS_ADM_EXEC", "neofs-adm")
 
-NEOFS_ADM_CONFIG_PATH = os.getenv("NEOFS_ADM_CONFIG_PATH", os.path.join(DEVENV_PATH, "neofs-adm.yml"))
+NEOFS_ADM_CONFIG_PATH = os.getenv(
+    "NEOFS_ADM_CONFIG_PATH", os.path.join(DEVENV_PATH, "neofs-adm.yml")
+)
 
 FREE_STORAGE = os.getenv("FREE_STORAGE", "false").lower() == "true"
 BIN_VERSIONS_FILE = os.getenv("BIN_VERSIONS_FILE")
diff --git a/tools/src/openssl_config_fix.py b/tools/src/openssl_config_fix.py
index 464e04b87..8caf4b604 100755
--- a/tools/src/openssl_config_fix.py
+++ b/tools/src/openssl_config_fix.py
@@ -36,60 +36,60 @@ def setup_logging():
 
 def modify_openssl_config(filename: Path):
     """Modify the openssl configuration file to support ripemd160."""
-    logging.info(f'Modifying openssl config file at: {filename}')
+    logging.info(f"Modifying openssl config file at: {filename}")
 
     if not filename.is_file():
-        logging.error(f'The file {filename} does not exist. Exiting.')
Exiting.") return try: - with open(filename, 'r') as file: + with open(filename, "r") as file: lines = file.readlines() - with tempfile.NamedTemporaryFile('w', delete=False) as temp_file: + with tempfile.NamedTemporaryFile("w", delete=False) as temp_file: in_provider_sect = False in_default_sect = False for line in lines: - if line.strip() == '#openssl_conf = openssl_init': - temp_file.write('openssl_conf = openssl_init\n') - logging.info('Enabled openssl_init') - elif line.strip() == '[provider_sect]': + if line.strip() == "#openssl_conf = openssl_init": + temp_file.write("openssl_conf = openssl_init\n") + logging.info("Enabled openssl_init") + elif line.strip() == "[provider_sect]": in_provider_sect = True temp_file.write(line) - elif in_provider_sect and line.strip() == 'default = default_sect': + elif in_provider_sect and line.strip() == "default = default_sect": temp_file.write(line) - temp_file.write('legacy = legacy_sect\n') - logging.info('Added legacy_sect to provider_sect') + temp_file.write("legacy = legacy_sect\n") + logging.info("Added legacy_sect to provider_sect") in_provider_sect = False - elif line.strip() == '[default_sect]': + elif line.strip() == "[default_sect]": in_default_sect = True temp_file.write(line) - elif in_default_sect and line.strip() == '# activate = 1': - temp_file.write('activate = 1\n') - logging.info('Activated default_sect') + elif in_default_sect and line.strip() == "# activate = 1": + temp_file.write("activate = 1\n") + logging.info("Activated default_sect") in_default_sect = False else: temp_file.write(line) - temp_file.write('[legacy_sect]\n') - temp_file.write('activate = 1\n') - logging.info('Added and activated legacy_sect') + temp_file.write("[legacy_sect]\n") + temp_file.write("activate = 1\n") + logging.info("Added and activated legacy_sect") os.chmod(temp_file.name, 0o644) os.replace(temp_file.name, str(filename)) except (IOError, PermissionError) as e: - logging.error(f'An error occurred while modifying {filename}: {e}') + logging.error(f"An error occurred while modifying {filename}: {e}") except Exception as e: - logging.error(f'An unexpected error occurred: {e}') + logging.error(f"An unexpected error occurred: {e}") - logging.info(f'Finished modifying openssl config file at: {filename}') + logging.info(f"Finished modifying openssl config file at: {filename}") def main(): setup_logging() - openssl_config_file = Path('/etc/ssl/openssl.cnf') + openssl_config_file = Path("/etc/ssl/openssl.cnf") modify_openssl_config(openssl_config_file) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tools/src/process-allure-reports.py b/tools/src/process-allure-reports.py index a0d5200ae..9bd6364a7 100644 --- a/tools/src/process-allure-reports.py +++ b/tools/src/process-allure-reports.py @@ -3,78 +3,103 @@ import argparse from allure_combine import combine_allure -COMBINE_DIR = 'combine' -RUN_NUMBER = 'RunNumber' # the key for the attribute -FILE_PATH = 'FilePath' # the key for the attribute, is the path for the static page and allure report zip files -COMPLETE_FILE_NAME = 'index.html' # file to write COMBINE_DIR/complete.html file data into +COMBINE_DIR = "combine" +RUN_NUMBER = "RunNumber" # the key for the attribute +FILE_PATH = "FilePath" # the key for the attribute, is the path for the static page and allure report zip files +COMPLETE_FILE_NAME = "index.html" # file to write COMBINE_DIR/complete.html file data into PUT_TIMEOUT = 600 # in seconds def parse_args(): - parser = argparse.ArgumentParser(description='Process allure 
-    parser.add_argument('--neofs_domain', required=True, type=str, help='NeoFS network domain, example: t5.fs.neo.org')
-    parser.add_argument('--wallet', required=True, type=str, help='Path to the wallet')
-    parser.add_argument('--cid', required=True, type=str, help='Container ID')
-    parser.add_argument('--run_id', required=True, type=str, help='GitHub run ID')
-    parser.add_argument('--allure_report', type=str, help='Path to generated allure report directory',
-                        default='allure_report'),
-    parser.add_argument('--expire-at', type=int,
-                        help='Expiration epoch. If epoch is not provided, or if it is 0, the report will be stored indefinitely',
-                        default=None)
+    parser = argparse.ArgumentParser(description="Process allure reports")
+    parser.add_argument(
+        "--neofs_domain",
+        required=True,
+        type=str,
+        help="NeoFS network domain, example: t5.fs.neo.org",
+    )
+    parser.add_argument("--wallet", required=True, type=str, help="Path to the wallet")
+    parser.add_argument("--cid", required=True, type=str, help="Container ID")
+    parser.add_argument("--run_id", required=True, type=str, help="GitHub run ID")
+    parser.add_argument(
+        "--allure_report",
+        type=str,
+        help="Path to generated allure report directory",
+        default="allure_report",
+    ),
+    parser.add_argument(
+        "--expire-at",
+        type=int,
+        help="Expiration epoch. If epoch is not provided, or if it is 0, the report will be stored indefinitely",
+        default=None,
+    )
 
     return parser.parse_args()
 
 
-def put_combine_result_as_static_page(directory: str, neofs_domain: str, wallet: str, cid: str, run_id: str,
-                                      expire_at: int, password: str) -> None:
+def put_combine_result_as_static_page(
+    directory: str,
+    neofs_domain: str,
+    wallet: str,
+    cid: str,
+    run_id: str,
+    expire_at: int,
+    password: str,
+) -> None:
     base_cmd = (
-        f'NEOFS_CLI_PASSWORD={password} neofs-cli --rpc-endpoint st1.{neofs_domain}:8080 '
-        f'--wallet {wallet} object put --cid {cid} --timeout {PUT_TIMEOUT}s'
+        f"NEOFS_CLI_PASSWORD={password} neofs-cli --rpc-endpoint st1.{neofs_domain}:8080 "
+        f"--wallet {wallet} object put --cid {cid} --timeout {PUT_TIMEOUT}s"
     )
     if expire_at is not None and expire_at > 0:
-        base_cmd += f' --expire-at {expire_at}'
+        base_cmd += f" --expire-at {expire_at}"
     for subdir, dirs, files in os.walk(directory):
         current_dir_name = os.path.basename(subdir)
         for filename in files:
             filepath = subdir + os.sep + filename
-            base_cmd_with_file = f'{base_cmd} --file {filepath} --attributes {RUN_NUMBER}={run_id},'
-            if filename == 'complete.html' and current_dir_name == COMBINE_DIR:
+            base_cmd_with_file = f"{base_cmd} --file {filepath} --attributes {RUN_NUMBER}={run_id},"
+            if filename == "complete.html" and current_dir_name == COMBINE_DIR:
                 # allure_combine combines the Allure report and saves it as a static page under the name "complete.html"
                 # Later we will write a patch in allure_combine or fork it, but for now we will rename to "index.html"
                 filename = COMPLETE_FILE_NAME
-                object_cmd = f'{base_cmd_with_file}{FILE_PATH}={run_id}/{filename}'
-            elif current_dir_name == 'attachments' and filename.endswith('.zip'):
+                object_cmd = f"{base_cmd_with_file}{FILE_PATH}={run_id}/{filename}"
+            elif current_dir_name == "attachments" and filename.endswith(".zip"):
                 # We save the logs archives as separate objects in order to make a static page small size.
                 # Without this, its size will be hundreds of megabytes.
                 object_cmd = (
-                    f'{base_cmd_with_file}{FILE_PATH}={run_id}/data/{current_dir_name}/{filename},'
-                    f'ContentType=application/zip'
+                    f"{base_cmd_with_file}{FILE_PATH}={run_id}/data/{current_dir_name}/{filename},"
+                    f"ContentType=application/zip"
                 )
             else:
                 # Unfortunately, for a static page, we can't collect all the test artifacts.
                 # So we do only archives with logs, other important data are contained in the static page.
                 continue
-            print(f'Cmd: {object_cmd}')
+            print(f"Cmd: {object_cmd}")
             try:
-                compl_proc = subprocess.run(object_cmd, check=True, text=True,
-                                            stdout=subprocess.PIPE, stderr=subprocess.PIPE, timeout=PUT_TIMEOUT,
-                                            shell=True)
+                compl_proc = subprocess.run(
+                    object_cmd,
+                    check=True,
+                    text=True,
+                    stdout=subprocess.PIPE,
+                    stderr=subprocess.PIPE,
+                    timeout=PUT_TIMEOUT,
+                    shell=True,
+                )
 
-                print(f'RC: {compl_proc.returncode}')
-                print(f'Output: {compl_proc.stdout}')
-                print(f'Error: {compl_proc.stderr}')
+                print(f"RC: {compl_proc.returncode}")
+                print(f"Output: {compl_proc.stdout}")
+                print(f"Error: {compl_proc.stderr}")
 
             except subprocess.CalledProcessError as e:
                 raise Exception(
-                    f'Command failed: {e.cmd}\n'
-                    f'Error code: {e.returncode}\n'
-                    f'Output: {e.output}\n'
-                    f'Stdout: {e.stdout}\n'
-                    f'Stderr: {e.stderr}\n'
+                    f"Command failed: {e.cmd}\n"
+                    f"Error code: {e.returncode}\n"
+                    f"Output: {e.output}\n"
+                    f"Stdout: {e.stdout}\n"
+                    f"Stderr: {e.stderr}\n"
                 )
 
@@ -94,18 +119,34 @@ def combine_report(allure_path: str) -> str:
 
 
 def get_password() -> str:
-    password = os.getenv('TEST_RESULTS_PASSWORD')
+    password = os.getenv("TEST_RESULTS_PASSWORD")
     return password
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     args = parse_args()
     combine_path = combine_report(args.allure_report)
     neofs_password = get_password()
-    put_combine_result_as_static_page(combine_path, args.neofs_domain, args.wallet, args.cid, args.run_id, args.expire_at,
-                                      neofs_password)
-    put_combine_result_as_static_page(args.allure_report, args.neofs_domain, args.wallet, args.cid, args.run_id,
-                                      args.expire_at, neofs_password)
+    put_combine_result_as_static_page(
+        combine_path,
+        args.neofs_domain,
+        args.wallet,
+        args.cid,
+        args.run_id,
+        args.expire_at,
+        neofs_password,
+    )
+    put_combine_result_as_static_page(
+        args.allure_report,
+        args.neofs_domain,
+        args.wallet,
+        args.cid,
+        args.run_id,
+        args.expire_at,
+        neofs_password,
+    )
 
-    print(f'See report: https://http.{args.neofs_domain}/{args.cid}/{args.run_id}/{COMPLETE_FILE_NAME}')
+    print(
+        f"See report: https://http.{args.neofs_domain}/{args.cid}/{args.run_id}/{COMPLETE_FILE_NAME}"
+    )
diff --git a/tools/src/zip_dev_env_logs.py b/tools/src/zip_dev_env_logs.py
index 5c9018cff..89fc7e36b 100644
--- a/tools/src/zip_dev_env_logs.py
+++ b/tools/src/zip_dev_env_logs.py
@@ -21,25 +21,25 @@ def save_container_logs(output_directory):
     with tempfile.TemporaryDirectory() as temp_output_directory:
         for container in containers:
             container_name = container.name
-            logging.info(f'Saving logs from container: {container_name}')
+            logging.info(f"Saving logs from container: {container_name}")
 
-            log_content = container.logs().decode('utf-8')
-            log_filename = f'{container_name}_logs.txt'
+            log_content = container.logs().decode("utf-8")
+            log_filename = f"{container_name}_logs.txt"
             log_file_path = os.path.join(temp_output_directory, log_filename)
 
-            with open(log_file_path, 'w', encoding='utf-8') as log_file:
+            with open(log_file_path, "w", encoding="utf-8") as log_file:
                 log_file.write(log_content)
 
-            logging.info(f'Logs from container {container_name} saved to file: {log_file_path}')
+            logging.info(f"Logs from container {container_name} saved to file: {log_file_path}")
 
-        zip_filename = os.path.join(output_directory, 'containers_logs.zip')
-        with zipfile.ZipFile(zip_filename, 'w') as zip_file:
+        zip_filename = os.path.join(output_directory, "containers_logs.zip")
+        with zipfile.ZipFile(zip_filename, "w") as zip_file:
             for folder_name, subfolders, filenames in os.walk(temp_output_directory):
                 for filename in filenames:
                     file_path = os.path.join(folder_name, filename)
                     zip_file.write(file_path, os.path.basename(file_path))
 
-    logging.info(f'Containers logs saved to zip archive: {zip_filename}')
+    logging.info(f"Containers logs saved to zip archive: {zip_filename}")
 
 
 if __name__ == "__main__":
diff --git a/tools/tests/test_modify_openssl_config.py b/tools/tests/test_modify_openssl_config.py
index efb074f05..0a5317be1 100755
--- a/tools/tests/test_modify_openssl_config.py
+++ b/tools/tests/test_modify_openssl_config.py
@@ -10,8 +10,8 @@
 
 from src.openssl_config_fix import modify_openssl_config
 
-path_to_original = Path(__file__).parent.parent / 'data/original_openssl.cnf'
-path_to_modified = Path(__file__).parent.parent / 'data/modified_openssl.cnf'
+path_to_original = Path(__file__).parent.parent / "data/original_openssl.cnf"
+path_to_modified = Path(__file__).parent.parent / "data/modified_openssl.cnf"
 
 
 @pytest.fixture
@@ -28,12 +28,12 @@ def test_modify_openssl_config(temp_file):
     # Test modify_openssl_config
     modify_openssl_config(Path(temp_file))
     if not filecmp.cmp(temp_file, path_to_modified):
-        with open(temp_file, 'r') as tempfile, open(path_to_modified, 'r') as modified_file:
+        with open(temp_file, "r") as tempfile, open(path_to_modified, "r") as modified_file:
             diff = difflib.unified_diff(
                 tempfile.readlines(),
                 modified_file.readlines(),
-                fromfile='temp_file',
-                tofile='path_to_modified',
+                fromfile="temp_file",
+                tofile="path_to_modified",
             )
-            print(''.join(diff))
+            print("".join(diff))
         assert False
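
For reviewers who want to sanity-check that the openssl_config_fix.py hunks above are
formatting-only, here is a quick way to exercise the script on a throwaway config. This
sketch is not part of the patch: the sample input below is illustrative, and it assumes
src.openssl_config_fix is importable from the tools/ directory (the same import path
the test above uses).

    import tempfile
    from pathlib import Path

    from src.openssl_config_fix import modify_openssl_config

    # A minimal stand-in for a stock openssl.cnf; it contains only the
    # markers the script looks for (illustrative, not a real config).
    sample = (
        "#openssl_conf = openssl_init\n"
        "[provider_sect]\n"
        "default = default_sect\n"
        "[default_sect]\n"
        "# activate = 1\n"
    )

    # Write the sample to a temporary file, then let the script rewrite it in place.
    with tempfile.NamedTemporaryFile("w", suffix=".cnf", delete=False) as f:
        f.write(sample)

    modify_openssl_config(Path(f.name))

    # The rewritten file should have openssl_conf enabled, "legacy = legacy_sect"
    # added under [provider_sect], "activate = 1" uncommented under [default_sect],
    # and a new [legacy_sect] section with "activate = 1" appended at the end.
    print(Path(f.name).read_text())

Running this before and after the patch should print the same result, which is the
point: black changes how the code is written, not what it does.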