diff --git a/testing-suite/staking-v4/caching.py b/testing-suite/staking-v4/caching.py index 214011c9..4b55983e 100644 --- a/testing-suite/staking-v4/caching.py +++ b/testing-suite/staking-v4/caching.py @@ -3,6 +3,8 @@ from chain_commander import add_blocks import time +from utils.logger import logger + def force_reset_validator_statistics(): route = f"{DEFAULT_PROXY}/simulator/force-reset-validator-statistics" @@ -14,3 +16,4 @@ def force_reset_validator_statistics(): # wait 1 sec time.sleep(1) + logger.info("Validator statistics reset successfully and additional block added") diff --git a/testing-suite/staking-v4/chain_commander.py b/testing-suite/staking-v4/chain_commander.py index a96c2195..2a622875 100644 --- a/testing-suite/staking-v4/chain_commander.py +++ b/testing-suite/staking-v4/chain_commander.py @@ -1,14 +1,15 @@ import requests import json - from config import * from network_provider.get_transaction_info import get_status_of_tx from constants import * import time -from core.validatorKey import ValidatorKey + +from utils.logger import logger def send_egld_to_address(egld_amount, erd_address): + logger.info(f"Sending {egld_amount} to address {erd_address}") details = { 'address': f'{erd_address}', 'balance': f'{egld_amount}' @@ -18,93 +19,71 @@ def send_egld_to_address(egld_amount, erd_address): json_structure = json.dumps(details_list) response = requests.post(f"{DEFAULT_PROXY}/simulator/set-state", data=json_structure) response.raise_for_status() - + response_data = response.json() + logger.info(f"Transfer response: {response_data.get('message', 'Balance updated successfully')}") return response.text def add_blocks(nr_of_blocks): + logger.info(f"Requesting generation of {nr_of_blocks} blocks") response = requests.post(f"{DEFAULT_PROXY}/simulator/generate-blocks/{nr_of_blocks}") response.raise_for_status() + logger.info(f"Generated {nr_of_blocks} blocks; Response status: {response.status_code}") return response.text def get_block() -> int: response = requests.get(f"{DEFAULT_PROXY}/network/status/0") + response.raise_for_status() parsed = response.json() general_data = parsed.get("data") general_status = general_data.get("status") nonce = general_status.get("erd_nonce") + logger.info(f"Current block nonce: {nonce}") return nonce def add_blocks_until_epoch_reached(epoch_to_be_reached: int): + logger.info(f"Generating blocks until epoch {epoch_to_be_reached} is reached") req = requests.post(f"{DEFAULT_PROXY}/simulator/generate-blocks-until-epoch-reached/{str(epoch_to_be_reached)}") + req.raise_for_status() add_blocks(1) + logger.info(f"Epoch {epoch_to_be_reached} reached") return req.text def add_blocks_until_tx_fully_executed(tx_hash) -> str: - print("Checking: ", tx_hash) + logger.info(f"Checking status of transaction {tx_hash}") counter = 0 while counter < MAX_NUM_OF_BLOCKS_UNTIL_TX_SHOULD_BE_EXECUTED: add_blocks(1) time.sleep(WAIT_UNTIL_API_REQUEST_IN_SEC) - if get_status_of_tx(tx_hash) == "pending": + tx_status = get_status_of_tx(tx_hash) + if tx_status == "pending": + logger.info(f"Transaction {tx_hash} still pending after {counter} blocks") counter += 1 else: - print("Tx fully executed after", counter, " blocks.") - return get_status_of_tx(tx_hash) + logger.info(f"Transaction {tx_hash} executed after {counter} blocks") + return tx_status + raise Exception(f"Transaction {tx_hash} not executed within {MAX_NUM_OF_BLOCKS_UNTIL_TX_SHOULD_BE_EXECUTED} blocks.") def is_chain_online() -> bool: - flag = False - - while not flag: + while True: time.sleep(1) try: response = 
requests.get(f"{DEFAULT_PROXY}/network/status/0") - print(response) - flag = True - except requests.exceptions.ConnectionError: - print("Chain not started jet") - - return flag - - -def add_key(keys: list[ValidatorKey]) -> str: - private_keys = [] - for key in keys: - private_keys.append(key.get_private_key()) - - post_body = { - "privateKeysBase64": private_keys - } - - json_structure = json.dumps(post_body) - req = requests.post(f"{DEFAULT_PROXY}/simulator/add-keys", data=json_structure) - - return req.text - - -def add_blocks_until_key_eligible(keys: list[ValidatorKey]) -> ValidatorKey: - flag = False - while not flag: - for key in keys: - if key.get_state() == "eligible": - eligible_key = key - print("eligible key found") - flag = True - - else: - print("no eligible key found , moving to next epoch...") - current_epoch = proxy_default.get_network_status().epoch_number - add_blocks_until_epoch_reached(current_epoch+1) - add_blocks(3) - - return eligible_key + response.raise_for_status() + logger.info("Chain is online") + return True + except requests.exceptions.ConnectionError as e: + logger.warning("Chain not started yet: ConnectionError") + except Exception as e: + logger.error(f"Unexpected error when checking chain status: {str(e)}") + raise def add_blocks_until_last_block_of_current_epoch() -> str: @@ -117,6 +96,7 @@ def add_blocks_until_last_block_of_current_epoch() -> str: passed_nonces = status.get("erd_nonces_passed_in_current_epoch") blocks_to_be_added = rounds_per_epoch - passed_nonces + logger.info(f"Adding {blocks_to_be_added} blocks to reach the end of the current epoch") response_from_add_blocks = add_blocks(blocks_to_be_added) + logger.info(f"Reached the last block of the current epoch") return response_from_add_blocks - diff --git a/testing-suite/staking-v4/core/chain_simulator.py b/testing-suite/staking-v4/core/chain_simulator.py index 43dc7fda..a6aaf8e6 100644 --- a/testing-suite/staking-v4/core/chain_simulator.py +++ b/testing-suite/staking-v4/core/chain_simulator.py @@ -1,14 +1,11 @@ -import stat - -from constants import * from config import * import os import signal import subprocess -from subprocess import Popen -from threading import Thread import threading +from utils.logger import logger + class ChainSimulator: def __init__(self, path: Path) -> None: @@ -20,6 +17,12 @@ def __init__(self, path: Path) -> None: self.num_waiting_validators_meta = num_waiting_validators_meta self.rounds_per_epoch = rounds_per_epoch self.process = None + logger.info(f"Trying to Initialize ChainSimulator with configuration at {path}\n") + + # Check if the ChainSimulator binary exists in the specified path + if not os.path.exists(self.path / "chainsimulator"): + logger.error("ChainSimulator binary not found at the specified path.") + raise FileNotFoundError("ChainSimulator binary not found at the specified path.") def start(self): command = f"./chainsimulator --log-level {self.log_level} --rounds-per-epoch {rounds_per_epoch}\ @@ -27,6 +30,7 @@ def start(self): -num-waiting-validators-per-shard {num_waiting_validators_per_shard} \ -num-validators-meta {num_validators_meta} \ -num-waiting-validators-meta {num_waiting_validators_meta}" + command = ' '.join(command.split()) flag = True while flag: @@ -34,16 +38,42 @@ def start(self): command = command.replace(" ", " ") else: flag = False - print(command) + logger.info(f"Starting ChainSimulator with command: {command}") - self.process = subprocess.Popen(command, stdout=subprocess.PIPE, + self.process = subprocess.Popen(command, 
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, preexec_fn=os.setsid, cwd=chain_simulator_path)
-        out, err = self.process.communicate()
-        if err:
-            print(err)
+        self.stdout_thread = threading.Thread(target=self.read_output, args=(self.process.stdout,))
+        self.stderr_thread = threading.Thread(target=self.read_output, args=(self.process.stderr, True))
+        self.stdout_thread.start()
+        self.stderr_thread.start()
+
+    def read_output(self, stream, is_error=False):
+        """Reads from a stream and logs the output."""
+        try:
+            for line in iter(stream.readline, b''):
+                decoded_line = line.decode()
+                # TODO Use the code below in order to retrieve the Chain Simulator logs
+                # if is_error:
+                #     logger.error(decoded_line.strip())
+                # else:
+                #     logger.info(decoded_line.strip())
+        finally:
+            stream.close()
+
+    def stop(self):
+        if self.process is not None:
+            # Send SIGTERM to the process group to cleanly stop all processes
+            os.killpg(os.getpgid(self.process.pid), signal.SIGTERM)
-
-    def stop(self) -> None:
-        self.process.terminate()
+            self.process.wait()
+            # Ensure output threads are also terminated
+            if hasattr(self, 'stdout_thread'):
+                self.stdout_thread.join()
+            if hasattr(self, 'stderr_thread'):
+                self.stderr_thread.join()
+            logger.info("ChainSimulator process and all child processes stopped\n")
+        else:
+            logger.warning("\nNo ChainSimulator process found.\n")
diff --git a/testing-suite/staking-v4/core/validatorKey.py b/testing-suite/staking-v4/core/validatorKey.py
index a04bee8d..f7d21d32 100644
--- a/testing-suite/staking-v4/core/validatorKey.py
+++ b/testing-suite/staking-v4/core/validatorKey.py
@@ -11,6 +11,7 @@ class ValidatorKey:

    def __init__(self, path: Path) -> None:
        self.path = path
+        logger.info(f"ValidatorKey initialized with path: {path}")

    def public_address(self) -> str:
        f = open(self.path)
@@ -29,9 +30,11 @@ def get_status(self, owner_address: str):
        owner_address = Address.from_bech32(owner_address).to_hex()
        key_status_pair = get_bls_key_status([owner_address])
        if key_status_pair is None:
+            logger.warning("No status found for any keys")
            return None
        for key, status in key_status_pair.items():
            if key == self.public_address():
+                logger.info(f"Status: {status} for BLS Key: {key}")
                return status

    # is using /validator/statistics route
@@ -47,15 +50,19 @@ def get_state(self):
        key_data = general_statistics.get(self.public_address())

        if key_data is None:
+            logger.warning(f"No state data found for validator key: {self.public_address()}")
            return None

        else:
            status = key_data.get("validatorStatus")
+            logger.info(f"Validator status is: {status}")
            return status

    # is using /validator/auction
    def get_auction_state(self):
+        logger.info("Resetting validator statistics before fetching auction state.")
        force_reset_validator_statistics()

+        logger.info(f"Requesting auction state from {OBSERVER_META}/validator/auction.")
        response = requests.get(f"{OBSERVER_META}/validator/auction")
        response.raise_for_status()
        parsed = response.json()
@@ -69,18 +76,23 @@ def get_auction_state(self):
            if node_list.get("blsKey") == self.public_address():
                state = node_list.get("qualified")
                if state:
+                    logger.info(f"BLS key {self.public_address()} is qualified in the auction.")
                    return "qualified"
                else:
+                    logger.info(f"BLS key {self.public_address()} is unqualified in the auction.")
                    return "unqualified"
            else:
+                logger.info(f"No auction data found for BLS key {self.public_address()}.")
                return None

    # using getOwner vm-query
    def belongs_to(self, address: str) -> bool:
        owner = get_owner([self.public_address()])
        if owner == address:
+            logger.info(f"Checked 
ownership: True for address: {address}") return True else: + logger.info(f"Checked ownership: False for address: {address}") return False def get_private_key(self) -> str: @@ -93,5 +105,5 @@ def get_private_key(self) -> str: private_key += line if "\n" in private_key: private_key = private_key.replace("\n", "") - + logger.debug(f"Private key retrieved for {self.path}") return private_key \ No newline at end of file diff --git a/testing-suite/staking-v4/core/wallet.py b/testing-suite/staking-v4/core/wallet.py index f0db7b41..36c63bfd 100644 --- a/testing-suite/staking-v4/core/wallet.py +++ b/testing-suite/staking-v4/core/wallet.py @@ -5,53 +5,61 @@ from multiversx_sdk_wallet import UserSigner from multiversx_sdk_core import Address +from utils.logger import logger + class Wallet: def __init__(self, path: Path) -> None: self.path = path + logger.info(f"Wallet initialized with path: {self.path}") def public_address(self) -> str: - f = open(self.path) + with open(self.path) as f: + lines = f.readlines() - lines = f.readlines() for line in lines: if "BEGIN" in line: line = line.split(" ") - address = line[-1].replace("-----", "") - if "\n" in address: - address = address.replace("\n", "") - break - - return address + address = line[-1].replace("-----", "").strip() + return address def get_balance(self) -> int: - response = requests.get(f"{DEFAULT_PROXY}/address/{self.public_address()}/balance") + address = self.public_address() + logger.info(f"Fetching balance for address: {address}") + response = requests.get(f"{DEFAULT_PROXY}/address/{address}/balance") response.raise_for_status() parsed = response.json() general_data = parsed.get("data") balance = general_data.get("balance") + logger.info(f"Retrieved balance: {balance} for address: {address}") return balance - def set_balance(self, egld_amount): + address = self.public_address() + logger.info(f"Setting balance for address: {address} to {egld_amount}") details = { - 'address': f'{self.public_address()}', - 'balance': f'{egld_amount}' + 'address': address, + 'balance': egld_amount } details_list = [details] json_structure = json.dumps(details_list) req = requests.post(f"{DEFAULT_PROXY}/simulator/set-state", data=json_structure) + logger.info(f"Set balance request status: {req.status_code}") return req.text def get_signer(self) -> UserSigner: + logger.info("Creating UserSigner from PEM file.") return UserSigner.from_pem_file(self.path) def get_address(self) -> Address: - return Address.from_bech32(self.public_address()) + address = self.public_address() + return Address.from_bech32(address) def get_account(self): - return proxy_default.get_account(self.get_address()) + account = proxy_default.get_account(self.get_address()) + logger.info(f"Retrieved account details for: {account.address.to_bech32()}") + return account \ No newline at end of file diff --git a/testing-suite/staking-v4/delegation.py b/testing-suite/staking-v4/delegation.py index da78fa75..a93f989c 100644 --- a/testing-suite/staking-v4/delegation.py +++ b/testing-suite/staking-v4/delegation.py @@ -33,6 +33,34 @@ def create_new_delegation_contract(owner: Wallet, AMOUNT="1250000000000000000000 # send tx tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"New delegation contract created, transaction hash: {tx_hash}") + return tx_hash + + +def make_new_contract_from_validator_data(owner: Wallet, SERVICE_FEE="00", + DELEGATION_CAP="00") -> str: + # compute tx + tx = Transaction(sender=owner.get_address().to_bech32(), + receiver=SYSTEM_DELEGATION_MANAGER_CONTRACT, + 
nonce=owner.get_account().nonce, + gas_price=1000000000, + gas_limit=590000000, + chain_id=chain_id, + value=0) + + tx.data = f"makeNewContractFromValidatorData@{DELEGATION_CAP}@{SERVICE_FEE}".encode() + + tx_comp = TransactionComputer() + result_bytes = tx_comp.compute_bytes_for_signing(tx) + + signature = owner.get_signer().sign(result_bytes) + tx.signature = signature + + # send tx + tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"New contract from validator data created, transaction hash: {tx_hash}") return tx_hash @@ -58,6 +86,8 @@ def whitelist_for_merge(old_owner: Wallet, new_owner: Wallet, delegation_sc_addr # send tx tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"Whitelist for merge processed, transaction hash: {tx_hash}") return tx_hash @@ -83,6 +113,35 @@ def merge_validator_to_delegation_with_whitelist(new_owner: Wallet, delegation_s # send tx tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"Validator merged to delegation with whitelist, transaction hash: {tx_hash}") + return tx_hash + + +def merge_validator_to_delegation_same_owner(owner: Wallet, delegation_sc_address: str): + delegation_sc_address_as_hex = Address.from_bech32(delegation_sc_address).to_hex() + + # compute tx + tx = Transaction(sender=owner.get_address().to_bech32(), + receiver=SYSTEM_DELEGATION_MANAGER_CONTRACT, + nonce=owner.get_account().nonce, + gas_price=1000000000, + gas_limit=590000000, + chain_id=chain_id, + value=0) + + tx.data = f"mergeValidatorToDelegationSameOwner@{delegation_sc_address_as_hex}".encode() + + tx_comp = TransactionComputer() + result_bytes = tx_comp.compute_bytes_for_signing(tx) + + signature = owner.get_signer().sign(result_bytes) + tx.signature = signature + + # send tx + tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"Validator merged to delegation with the same owner, transaction hash: {tx_hash}") return tx_hash @@ -106,7 +165,7 @@ def add_nodes(owner: Wallet, delegation_sc_address: str, validatorKeys: list[Val chain_id=chain_id, value=0) - tx.data = f"addNodes@{stake_signature_and_public_key}".encode() + tx.data = f"addNodes{stake_signature_and_public_key}".encode() # prepare signature tx_comp = TransactionComputer() @@ -117,6 +176,8 @@ def add_nodes(owner: Wallet, delegation_sc_address: str, validatorKeys: list[Val # send tx tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"Nodes added to delegation, transaction hash: {tx_hash}") return tx_hash @@ -134,7 +195,7 @@ def stake_nodes(owner: Wallet, delegation_sc_address: str, validatorKeys: list[V chain_id=chain_id, value=0) - tx.data = f"stakeNodes@{pub_key_string}".encode() + tx.data = f"stakeNodes{pub_key_string}".encode() # prepare signature tx_comp = TransactionComputer() @@ -145,4 +206,6 @@ def stake_nodes(owner: Wallet, delegation_sc_address: str, validatorKeys: list[V # send tx tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"Nodes staked in delegation, transaction hash: {tx_hash}") return tx_hash diff --git a/testing-suite/staking-v4/network_provider/get_delegation_info.py b/testing-suite/staking-v4/network_provider/get_delegation_info.py index acc83a1a..5402c682 100644 --- a/testing-suite/staking-v4/network_provider/get_delegation_info.py +++ b/testing-suite/staking-v4/network_provider/get_delegation_info.py @@ -3,12 +3,16 @@ from helpers import base64_to_hex from multiversx_sdk_core import Address +from utils.logger import logger + def get_delegation_contract_address_from_tx(tx_hash): + logger.info(f"Fetching transaction details for 
hash: {tx_hash}") response = requests.get(f"{DEFAULT_PROXY}/transaction/{tx_hash}?withResults=True") response.raise_for_status() parsed = response.json() + logger.debug("Parsing transaction data") general_data = parsed.get("data") transaction_data = general_data.get("transaction") logs_data = transaction_data.get("logs") @@ -19,5 +23,5 @@ def get_delegation_contract_address_from_tx(tx_hash): delegation_contract_address = base64_to_hex(delegation_contract_address) delegation_contract_address = Address.from_hex(delegation_contract_address, "erd").to_bech32() - + logger.info(f"Delegation contract address obtained: {delegation_contract_address}") return delegation_contract_address diff --git a/testing-suite/staking-v4/network_provider/get_staking_info.py b/testing-suite/staking-v4/network_provider/get_staking_info.py index b6922694..da87d2e3 100644 --- a/testing-suite/staking-v4/network_provider/get_staking_info.py +++ b/testing-suite/staking-v4/network_provider/get_staking_info.py @@ -4,9 +4,12 @@ from constants import VALIDATOR_CONTRACT from config import DEFAULT_PROXY from helpers import base64_to_string +from utils.logger import logger def get_total_staked(owner: str): + logger.info(f"Fetching total staked for owner: {owner}") + address_in_hex = Address.from_bech32(owner).to_hex() post_body = { "scAddress": VALIDATOR_CONTRACT, @@ -15,6 +18,8 @@ def get_total_staked(owner: str): } json_structure = json.dumps(post_body) + logger.debug(f"Query payload prepared: {json_structure}") + response = requests.post(f"{DEFAULT_PROXY}/vm-values/query", data=json_structure) response.raise_for_status() parsed = response.json() @@ -25,4 +30,5 @@ def get_total_staked(owner: str): total_staked = total_staked_list[0] total_staked = base64_to_string(total_staked) + logger.info(f"Total staked for owner {owner}: {total_staked}") return total_staked diff --git a/testing-suite/staking-v4/network_provider/get_transaction_info.py b/testing-suite/staking-v4/network_provider/get_transaction_info.py index f42cb178..3257d5d3 100644 --- a/testing-suite/staking-v4/network_provider/get_transaction_info.py +++ b/testing-suite/staking-v4/network_provider/get_transaction_info.py @@ -2,9 +2,11 @@ import requests from config import DEFAULT_PROXY from helpers import string_to_base64 +from utils.logger import logger def get_status_of_tx(tx_hash: str) -> str: + logger.info(f"Checking transaction status for hash: {tx_hash}") response = requests.get(f"{DEFAULT_PROXY}/transaction/{tx_hash}/process-status") response.raise_for_status() parsed = response.json() @@ -14,20 +16,17 @@ def get_status_of_tx(tx_hash: str) -> str: general_data = parsed.get("data") status = general_data.get("status") + logger.info(f"Transaction status: {status} for tx_hash: {tx_hash}") return status def check_if_error_is_present_in_tx(error, tx_hash) -> bool: - flag = False + logger.info(f"Checking for error in transaction {tx_hash}") error_bytes = string_to_base64(error) response = requests.get(f"{DEFAULT_PROXY}/transaction/{tx_hash}?withResults=True") response.raise_for_status() + error_present = error_bytes.decode() in response.text or error in response.text + logger.info(f"Error presence: {error_present} | in tx_hash: {tx_hash}") - if error_bytes.decode() in response.text: - flag = True - - if error in response.text: - flag = True - - return error_bytes.decode() in response.text or error in response.text + return error_present diff --git a/testing-suite/staking-v4/network_provider/get_validator_info.py 
b/testing-suite/staking-v4/network_provider/get_validator_info.py index aaad0a95..f2224a6a 100644 --- a/testing-suite/staking-v4/network_provider/get_validator_info.py +++ b/testing-suite/staking-v4/network_provider/get_validator_info.py @@ -9,9 +9,11 @@ from helpers import base64_to_string from multiversx_sdk_core import Address from caching import force_reset_validator_statistics +from utils.logger import logger def get_bls_key_status(owner_public_key_in_hex: list[str]): + logger.info(f"Fetching BLS key status for public keys") key_status_pair = {} post_body = { @@ -26,6 +28,7 @@ def get_bls_key_status(owner_public_key_in_hex: list[str]): parsed = response.json() if '"returnData":null' in response.text: + logger.warning("No return data available for BLS keys status") return None general_data = parsed.get("data") @@ -39,10 +42,12 @@ def get_bls_key_status(owner_public_key_in_hex: list[str]): key_status_pair[bls_decoded] = status_decoded + logger.info(f"Successfully retrieved BLS key statuses") return key_status_pair def get_owner(public_validator_key: list[str]) -> str: + logger.info(f"Fetching owner for public validator key") post_body = { "scAddress": STAKING_CONTRACT, "funcName": "getOwner", @@ -56,6 +61,7 @@ def get_owner(public_validator_key: list[str]) -> str: parsed = response.json() if '"returnMessage":"owner address is nil"' in response.text: + logger.warning("No owner address found for given validator key") return "validatorKey not staked" general_data = parsed.get("data") @@ -66,11 +72,13 @@ def get_owner(public_validator_key: list[str]) -> str: address = base64_to_hex(address) address = Address.from_hex(address, "erd").to_bech32() + logger.info(f"Owner address successfully retrieved: {address}") return address # using validator/statistics def get_keys_state(keys: list) -> list[str]: + logger.info("Fetching states for validator keys") states = [] force_reset_validator_statistics() @@ -89,10 +97,13 @@ def get_keys_state(keys: list) -> list[str]: state = key_data.get("validatorStatus") states.append(state) + logger.info(f"Successfully retrieved states for {len(states)} keys") return states def get_keys_from_validator_auction(isQualified=True) -> list[str]: + logger.info(f"Fetching keys from validator auction with qualification status {isQualified}") + keys = [] force_reset_validator_statistics() @@ -110,10 +121,12 @@ def get_keys_from_validator_auction(isQualified=True) -> list[str]: if node_list.get("qualified") == isQualified: keys.append(node_list.get("blsKey")) + logger.info(f"Successfully retrieved {len(keys)} qualified keys from validator auction") return keys def get_keys_from_validator_statistics(needed_state: str) -> list[str]: + logger.info(f"Fetching keys from validator statistics with needed state: {needed_state}") keys = [] force_reset_validator_statistics() @@ -131,6 +144,7 @@ def get_keys_from_validator_statistics(needed_state: str) -> list[str]: if state == needed_state: keys.append(dict) + logger.info(f"Successfully retrieved {len(keys)} keys with state '{needed_state}' from validator statistics") return keys diff --git a/testing-suite/staking-v4/network_provider/key_management.py b/testing-suite/staking-v4/network_provider/key_management.py new file mode 100644 index 00000000..5ff9060b --- /dev/null +++ b/testing-suite/staking-v4/network_provider/key_management.py @@ -0,0 +1,45 @@ +from core.validatorKey import ValidatorKey +import requests +from chain_commander import add_blocks_until_epoch_reached, add_blocks +from config import proxy_default, DEFAULT_PROXY 
+import json
+
+from utils.logger import logger
+
+
+def add_key(keys: list[ValidatorKey]) -> str:
+
+    logger.info("Adding keys to simulator")
+    private_keys = []
+    for key in keys:
+        private_keys.append(key.get_private_key())
+
+    post_body = {
+        "privateKeysBase64": private_keys
+    }
+
+    json_structure = json.dumps(post_body)
+    req = requests.post(f"{DEFAULT_PROXY}/simulator/add-keys", data=json_structure)
+
+    logger.info("Keys added successfully")
+    return req.text
+
+
+def add_blocks_until_key_eligible(keys: list[ValidatorKey]) -> ValidatorKey:
+    logger.info("Attempting to reach eligibility for given keys")
+    flag = False
+    while not flag:
+        for key in keys:
+            if key.get_state() == "eligible":
+                eligible_key = key
+                logger.info("eligible key found")
+                flag = True
+
+            else:
+                logger.info("no eligible key found, moving to next epoch...")
+                current_epoch = proxy_default.get_network_status().epoch_number
+                add_blocks_until_epoch_reached(current_epoch+1)
+                add_blocks(3)
+
+    logger.info(f"Key {eligible_key.public_address()} is now eligible")
+    return eligible_key
\ No newline at end of file
diff --git a/testing-suite/staking-v4/pytest.ini b/testing-suite/staking-v4/pytest.ini
new file mode 100644
index 00000000..302cbb01
--- /dev/null
+++ b/testing-suite/staking-v4/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+testpaths = scenarios
+
+addopts = --tb=short -s
\ No newline at end of file
diff --git a/testing-suite/staking-v4/scenarios/_48.py b/testing-suite/staking-v4/scenarios/_48.py
deleted file mode 100644
index 09f52ea4..00000000
--- a/testing-suite/staking-v4/scenarios/_48.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import threading
-from chain_commander import *
-from network_provider.get_staking_info import get_total_staked
-from staking import *
-from core.validatorKey import ValidatorKey
-from core.chain_simulator import ChainSimulator
-
-# Steps:
-# 1) Stake with A 2 nodes
-# 2) check if balance is - (5000 + gas fees)
-# 3) check with getTotalStaked that has 5000 egld staked
-# 4) check with getOwner if staked keys belongs to A
-# 5) check with getBlsKeysStatus if keys are staked
-# do this in epoch 3, 4, 5 and 6
-
-
-EPOCHS = [3, 4, 5, 6]
-blockchain = ChainSimulator(chain_simulator_path)
-
-
-def chain_start():
-    print("chain is starting...")
-    blockchain.start()
-
-
-def main():
-    print("Happy testing")
-
-
-def test_48():
-
-    def scenario(epoch: int):
-
-        if is_chain_online():
-            # === PRE-CONDITIONS ==============================================================
-            AMOUNT_TO_MINT = "6000" + "000000000000000000"
-
-            _A = Wallet(Path("./wallets/walletKey_1.pem"))
-
-            # check if minting is successful
-            assert "success" in _A.set_balance(AMOUNT_TO_MINT)
-
-            # add some blocks
-            response = add_blocks(5)
-            assert "success" in response
-            time.sleep(0.5)
-
-            # check balance
-            assert _A.get_balance() == AMOUNT_TO_MINT
-
-            # move to epoch
-            assert "success" in add_blocks_until_epoch_reached(epoch)
-
-            # === STEP 1 ==============================================================
-            # 1) Stake with A 2 nodes
-            VALIDATOR_KEY_1 = ValidatorKey(Path("./validatorKeys/validatorKey_1.pem"))
-            VALIDATOR_KEY_2 = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem"))
-            A_Keys = [VALIDATOR_KEY_1, VALIDATOR_KEY_2]
-
-            tx_hash = stake(_A, A_Keys)
-
-            # move few blocks and check tx
-            assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
-
-            # === STEP 2 ==============================================================
-            # 2) check balance of A to be - (5000+gas fees)
-            assert int(_A.get_balance()) < int(AMOUNT_TO_MINT) - 5000
-
-            
# === STEP 3 ============================================================== - # 3) check total stake of A - total_staked = get_total_staked(_A.public_address()) - assert total_staked == "5000" + "000000000000000000" - - # === STEP 4 ============================================================== - # 4) check owner of keys - for key in A_Keys: - assert key.belongs_to(_A.public_address()) - - # === STEP 5 ============================================================== - # 5) check with getBlsKeysStatus if keys are staked or queued if epoch 3 - for key in A_Keys: - if epoch == 3: - assert key.get_status(_A.public_address()) == "queued" - else: - assert key.get_status(_A.public_address()) == "staked" - - # make sure all checks were done in needed epoch - assert proxy_default.get_network_status().epoch_number == epoch - # === FINISH =============================================================== - - # stop chain - blockchain.stop() - - # loop through all epochs needed for this scenario - for epoch in EPOCHS: - print(f"======================== EPOCH {epoch} =================================") - t1 = threading.Thread(target=chain_start) - t2 = threading.Thread(target=scenario, args=(epoch,)) - - t1.start(), t2.start() - t1.join(), t2.join() - - -if __name__ == '__main__': - main() diff --git a/testing-suite/staking-v4/scenarios/_49_50.py b/testing-suite/staking-v4/scenarios/_49_50.py deleted file mode 100644 index 4a6067f1..00000000 --- a/testing-suite/staking-v4/scenarios/_49_50.py +++ /dev/null @@ -1,133 +0,0 @@ -import threading - -from network_provider.get_transaction_info import check_if_error_is_present_in_tx -from chain_commander import * -from staking import * -from core.validatorKey import * -from core.chain_simulator import * - - -# Steps -# 1) Test 49 : Stake a node with an invalid bls key -# 2) Test 49.1 : Stake a node with an already staked bls key -# 3) Test 50 : Stake a node with less than 2500 egld - - -EPOCHS = [3, 4, 5, 6] -blockchain = ChainSimulator(chain_simulator_path) - - -def chain_start(): - print("chain is starting...") - blockchain.start() - - -def main(): - print("Happy testing") - - -def test_49_50(): - - def scenario(epoch: int): - - if is_chain_online(): - # === PRE-CONDITIONS ============================================================== - AMOUNT_TO_MINT = "6000" + "000000000000000000" - - _A = Wallet(Path("./wallets/walletKey_1.pem")) - _B = Wallet(Path("./wallets/walletKey_2.pem")) - _C = Wallet(Path("./wallets/walletKey_3.pem")) - - # check if minting is successful - assert "success" in _A.set_balance(AMOUNT_TO_MINT) - assert "success" in _B.set_balance(AMOUNT_TO_MINT) - assert "success" in _C.set_balance(AMOUNT_TO_MINT) - - # add some blocks - response = add_blocks(5) - assert "success" in response - time.sleep(0.5) - - # check balance - assert _A.get_balance() == AMOUNT_TO_MINT - assert _B.get_balance() == AMOUNT_TO_MINT - assert _C.get_balance() == AMOUNT_TO_MINT - - # move to epoch - assert "success" in add_blocks_until_epoch_reached(epoch) - - # === STEP 1 ============================================================== - # 1) Test 49 : Stake a node with an invalid bls key - - # BEGIN ######################################################################### - # THIS CODE IS COMMETED BECAUSE AFTER MULTIPLE TRIES IT IS NOT POSIBLE TO TEST ON CHAIN SIMULATOR - # THIS SCENARIO BECAUSE SIGNATURES DOES NOT MATTER , WE LEFT THE CODE HERE FOR FUTURE TESTING ON TESTNETS - # invalid_key = ValidatorKey(Path("./validatorKeys/invalid_bls_key_1.pem")) - # normal_key = 
ValidatorKey(Path("./validatorKeys/validatorKey_1.pem")) - # - # # move few blocks and check tx , the py framework should fail the decoding because of the wrong bls key file - # # this way we test the framework too - # try: - # tx_hash = stake(_A, [invalid_key]) - # except Exception as error: - # print(error) - # assert "codec can't decode byte" in str(error) - # - # # send a malicious stake with wrong stake signature / public key / nr of nodes - # tx_hash = malicious_stake(_A, [normal_key], TX_DATA_MANIPULATOR=True) - # - # assert addBlocksUntilTxSucceeded(tx_hash) == "fail" - # END ############################################################################ - - # === STEP 2 ============================================================== - # 2) Test 49.1 : Stake a node with an already staked bls key - # stake a key for the first time - _key = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem")) - tx_hash = stake(_A, [_key]) - - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # make sure key is staked - if epoch == 3: - assert _key.get_status(_A.public_address()) == "queued" - else: - assert _key.get_status(_A.public_address()) == "staked" - - # stake same key again - tx_hash = stake(_B, [_key]) - - assert add_blocks_until_tx_fully_executed(tx_hash) == "fail" - - # check if it fails with the correct error message - assert check_if_error_is_present_in_tx("error bls key already registered", tx_hash) - - # === STEP 3 ============================================================== - # 3) Test 50 : Stake a node with less than 2500 egld - # send a malicious stake with less than 2500 egld - _key = ValidatorKey(Path("./validatorKeys/validatorKey_3.pem")) - tx_hash = malicious_stake(_C, [_key], AMOUNT_DEFICIT=1) - - assert add_blocks_until_tx_fully_executed(tx_hash) == "fail" - - # check if error message is present in tx - assert check_if_error_is_present_in_tx("insufficient stake value", tx_hash) - - # make sure all checks were done in needed epoch - assert proxy_default.get_network_status().epoch_number == epoch - # === FINISH =============================================================== - - # stop chain - blockchain.stop() - - # loop through all epochs needed for this scenario - for epoch in EPOCHS: - print(f"======================== EPOCH {epoch} =================================") - t1 = threading.Thread(target=chain_start) - t2 = threading.Thread(target=scenario, args=(epoch,)) - - t1.start(), t2.start() - t1.join(), t2.join() - - -if __name__ == '__main__': - main() diff --git a/testing-suite/staking-v4/scenarios/_68_69.py b/testing-suite/staking-v4/scenarios/_68_69.py deleted file mode 100644 index 9f85df71..00000000 --- a/testing-suite/staking-v4/scenarios/_68_69.py +++ /dev/null @@ -1,249 +0,0 @@ -# Config: -# -num-validators-per-shard 10 -# -num-waiting-validators-per-shard 6 -# -num-validators-meta 10 -# -num-waiting-validators-meta 6 -# max nr of nodes that a SP should have = 10% * total num validators (=40) = 4 - -# Steps : -# - We have Addresses A B C and D -# - 1) Stake 4 nodes with B in epoch 4 -# - 2) Stake 2 nodes with C in epoch 4 -# - 3) Stake 2 nodes with D in epoch 4 -# - 4) Create a delegation contract with A -# - 5) Merge C nodes in A's contract - should succeed -# - 6) Merge D nodes in A's contract - should succeed -# - 7) Merge B nodes in A's contract - should fail - -import delegation -import time -from config import * -from delegation import * -from chain_commander import * -from network_provider.get_delegation_info import 
get_delegation_contract_address_from_tx -from network_provider.get_transaction_info import check_if_error_is_present_in_tx -from staking import * -from delegation import * -from core.wallet import * -from core.validatorKey import * -import threading -from core.chain_simulator import * - -EPOCHS = [3, 4, 5, 6] -blockchain = ChainSimulator(chain_simulator_path) - - -def chain_start(): - print("chain is starting...") - blockchain.start() - -def main(): - print("Happy testing") - - -def test_68_69(): - - def scenario(epoch): - if is_chain_online(): - # === PRE-CONDITIONS ============================================================== - # mint addresses - AMOUNT_TO_MINT = "50000" + "000000000000000000" - - _A = Wallet(Path("./wallets/walletKey_1.pem")) - _B = Wallet(Path("./wallets/walletKey_2.pem")) - _C = Wallet(Path("./wallets/walletKey_3.pem")) - _D = Wallet(Path("./wallets/walletKey_4.pem")) - - # check minting request will succeed - assert "success" in _A.set_balance(AMOUNT_TO_MINT) - assert "success" in _B.set_balance(AMOUNT_TO_MINT) - assert "success" in _C.set_balance(AMOUNT_TO_MINT) - assert "success" in _D.set_balance(AMOUNT_TO_MINT) - - # add some blocks - response = add_blocks(5) - assert "success" in response - time.sleep(0.5) - - # check balances - assert _A.get_balance() == AMOUNT_TO_MINT - assert _B.get_balance() == AMOUNT_TO_MINT - assert _C.get_balance() == AMOUNT_TO_MINT - assert _D.get_balance() == AMOUNT_TO_MINT - - # go to needed epoch - time.sleep(1) - response = add_blocks_until_epoch_reached(epoch) - assert "success" in response - - # === STEP 1 =============================================================== - # 1) Stake 4 nodes with B - VALIDATOR_KEY_1 = ValidatorKey(Path("./validatorKeys/validatorKey_1.pem")) - VALIDATOR_KEY_2 = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem")) - VALIDATOR_KEY_3 = ValidatorKey(Path("./validatorKeys/validatorKey_3.pem")) - VALIDATOR_KEY_4 = ValidatorKey(Path("./validatorKeys/validatorKey_4.pem")) - B_valid_keys_list = [VALIDATOR_KEY_1, VALIDATOR_KEY_2, VALIDATOR_KEY_3, VALIDATOR_KEY_4] - - # stake - tx_hash = stake(_B, B_valid_keys_list) - - # move on until tx is success - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # check bls keys statuses - for key in B_valid_keys_list: - if epoch == 3: - assert key.get_status(_B.public_address()) == "queued" - else: - assert key.get_status(_B.public_address()) == "staked" - - # check if owner is B - for key in B_valid_keys_list: - assert key.belongs_to(_B.public_address()) - - # === STEP 2 ================================================================ - # 2) Stake 2 nodes with C - VALIDATOR_KEY_5 = ValidatorKey(Path("./validatorKeys/validatorKey_5.pem")) - VALIDATOR_KEY_6 = ValidatorKey(Path("./validatorKeys/validatorKey_6.pem")) - C_valid_keys_list = [VALIDATOR_KEY_5, VALIDATOR_KEY_6] - - # stake - tx_hash = stake(_C, C_valid_keys_list) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # check bls keys statuses - for key in C_valid_keys_list: - if epoch == 3: - assert key.get_status(_C.public_address()) == "queued" - else: - assert key.get_status(_C.public_address()) == "staked" - - # check if owner is C - for key in C_valid_keys_list: - assert key.belongs_to(_C.public_address()) - - # === STEP 3 ============================================================ - # 3) Stake 2 nodes with D - VALIDATOR_KEY_7 = ValidatorKey(Path("./validatorKeys/validatorKey_7.pem")) - VALIDATOR_KEY_8 = 
ValidatorKey(Path("./validatorKeys/validatorKey_8.pem")) - D_valid_keys_list = [VALIDATOR_KEY_7, VALIDATOR_KEY_8] - - # stake - tx_hash = stake(_D, D_valid_keys_list) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # check bls keys statuses - for key in D_valid_keys_list: - if epoch == 3: - assert key.get_status(_D.public_address()) == "queued" - else: - assert key.get_status(_D.public_address()) == "staked" - - # check if owner is B - for key in D_valid_keys_list: - assert key.belongs_to(_D.public_address()) - - # === STEP 4 ============================================================ - # 4) Create a delegation contract with A - - # create contract - tx_hash = create_new_delegation_contract(_A) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # get delegation contract address - DELEGATION_CONTRACT_ADDRESS = get_delegation_contract_address_from_tx(tx_hash) - - # === STEP 5 ============================================================ - # 5) Merge C nodes in A's contract - should succeed - # 5.1 - send a whitelist for merge from A to C - tx_hash = whitelist_for_merge(_A, _C, DELEGATION_CONTRACT_ADDRESS) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # 5.2 - send merging tx from C - tx_hash = merge_validator_to_delegation_with_whitelist(_C, DELEGATION_CONTRACT_ADDRESS) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # check if keys from C were transfered to A's contract - for key in C_valid_keys_list: - assert key.belongs_to(DELEGATION_CONTRACT_ADDRESS) - - # check if keys are still staked - for key in C_valid_keys_list: - if epoch == 3: - assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "queued" - else: - assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "staked" - - # === STEP 6 ================================================== - # 6) Merge D nodes in A's contract - should succeed - # 6.1 - send a whitelist for merge from A to D - tx_hash = whitelist_for_merge(_A, _D, DELEGATION_CONTRACT_ADDRESS) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # 6.2 - send merging tx from A - tx_hash = merge_validator_to_delegation_with_whitelist(_D, DELEGATION_CONTRACT_ADDRESS) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # check if keys from C were transfered to A's contract - for key in C_valid_keys_list: - assert key.belongs_to(DELEGATION_CONTRACT_ADDRESS) - - # check if keys are still staked / queued - for key in C_valid_keys_list: - if epoch == 3: - assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "queued" - else: - assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "staked" - - # === STEP 7 =============================================================== - # 7) Merge B nodes in A's contract - should fail - # 7.1 - send a whitelist for merge from A to B - tx_hash = whitelist_for_merge(_A, _B, DELEGATION_CONTRACT_ADDRESS) - - # move few blocks and check tx - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - - # 7.2 - send merging tx from B - tx_hash = merge_validator_to_delegation_with_whitelist(_B, DELEGATION_CONTRACT_ADDRESS) - - # move few blocks and check tx if is failed - if epoch == 3: - assert add_blocks_until_tx_fully_executed(tx_hash) == "success" - else: - assert add_blocks_until_tx_fully_executed(tx_hash) == "fail" - # 
check reason of failure - assert check_if_error_is_present_in_tx("number of nodes is too high", tx_hash) - - # make sure all checks were done in needed epoch - assert proxy_default.get_network_status().epoch_number == epoch - # === FINISH =============================================================== - - # stop chain - blockchain.stop() - - # loop through all epochs needed for this scenario - for epoch in EPOCHS: - print(f"======================== EPOCH {epoch} =================================") - t1 = threading.Thread(target=chain_start) - t2 = threading.Thread(target=scenario, args=(epoch,)) - - t1.start(), t2.start() - t1.join(), t2.join() - - -if __name__ == '__main__': - main() diff --git a/testing-suite/staking-v4/scenarios/conftest.py b/testing-suite/staking-v4/scenarios/conftest.py new file mode 100644 index 00000000..a792e12b --- /dev/null +++ b/testing-suite/staking-v4/scenarios/conftest.py @@ -0,0 +1,16 @@ +import pytest + +from config import chain_simulator_path +from core.chain_simulator import ChainSimulator + + +@pytest.fixture(scope="function") +def blockchain(): + chain_simulator = ChainSimulator(chain_simulator_path) + chain_simulator.start() + yield chain_simulator + chain_simulator.stop() + +@pytest.fixture +def epoch(request): + return request.param diff --git a/testing-suite/staking-v4/scenarios/test_48_stake_nodes_with_sufficient_funds_and_valid_bls_key.py b/testing-suite/staking-v4/scenarios/test_48_stake_nodes_with_sufficient_funds_and_valid_bls_key.py new file mode 100644 index 00000000..869daf3c --- /dev/null +++ b/testing-suite/staking-v4/scenarios/test_48_stake_nodes_with_sufficient_funds_and_valid_bls_key.py @@ -0,0 +1,90 @@ +from chain_commander import * +from network_provider.get_staking_info import get_total_staked +from staking import * +from core.validatorKey import ValidatorKey +from core.chain_simulator import ChainSimulator +from utils.logger import logger +import pytest + +# Steps: +# 1) Stake with A 2 nodes +# 2) check if balance is - (5000 + gas fees) +# 3) check with getTotalStaked that has 5000 egld staked +# 4) check with getOwner if staked keys belongs to A +# 5) check with getBlsKeysStatus if keys are staked +# do this in epoch 3, 4, 5 and 6 + + +EPOCHS_ID = [3, 4, 5, 6] + + +def epoch_id(val): + return f"EPOCH-{val}" + + +def main(): + logger.info("Happy testing") + + +@pytest.mark.parametrize("epoch", EPOCHS_ID, indirect=True, ids=epoch_id) +def test_48_stake_nodes_with_sufficient_funds_and_valid_bls_key(blockchain, epoch): + # === PRE-CONDITIONS ============================================================== + assert True == is_chain_online() + AMOUNT_TO_MINT = "6000" + "000000000000000000" + + _A = Wallet(Path("./wallets/walletKey_1.pem")) + + # check if minting is successful + assert "success" in _A.set_balance(AMOUNT_TO_MINT) + + # add some blocks + response = add_blocks(5) + assert "success" in response + time.sleep(0.5) + + # check balance + assert _A.get_balance() == AMOUNT_TO_MINT + + # move to epoch + assert "success" in add_blocks_until_epoch_reached(epoch) + + # === STEP 1 ============================================================== + # 1) Stake with A 2 nodes + VALIDATOR_KEY_1 = ValidatorKey(Path("./validatorKeys/validatorKey_1.pem")) + VALIDATOR_KEY_2 = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem")) + A_Keys = [VALIDATOR_KEY_1, VALIDATOR_KEY_2] + + tx_hash = stake(_A, A_Keys) + + # move few blocks and check tx + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # === STEP 2 
============================================================== + # 2) check balance of A to be - (5000+gas fees) + assert int(_A.get_balance()) < int(AMOUNT_TO_MINT) - 5000 + + # === STEP 3 ============================================================== + # 3) check total stake of A + total_staked = get_total_staked(_A.public_address()) + assert total_staked == "5000" + "000000000000000000" + + # === STEP 4 ============================================================== + # 4) check owner of keys + for key in A_Keys: + assert key.belongs_to(_A.public_address()) + + # === STEP 5 ============================================================== + # 5) check with getBlsKeysStatus if keys are staked or queued if epoch 3 + for key in A_Keys: + if epoch == 3: + assert key.get_status(_A.public_address()) == "queued" + else: + assert key.get_status(_A.public_address()) == "staked" + + # make sure all checks were done in needed epoch + assert proxy_default.get_network_status().epoch_number == epoch + # === FINISH =============================================================== + + +if __name__ == '__main__': + main() diff --git a/testing-suite/staking-v4/scenarios/test_49_50_stake_nodes_failure_scenarios.py b/testing-suite/staking-v4/scenarios/test_49_50_stake_nodes_failure_scenarios.py new file mode 100644 index 00000000..7e2f8501 --- /dev/null +++ b/testing-suite/staking-v4/scenarios/test_49_50_stake_nodes_failure_scenarios.py @@ -0,0 +1,116 @@ +import threading + +from network_provider.get_transaction_info import check_if_error_is_present_in_tx +from chain_commander import * +from staking import * +from core.validatorKey import * +from core.chain_simulator import * +import pytest +# Steps +# 1) Test 49 : Stake a node with an invalid bls key +# 2) Test 49.1 : Stake a node with an already staked bls key +# 3) Test 50 : Stake a node with less than 2500 egld + + +EPOCHS_ID = [3, 4, 5, 6] + + +def epoch_id(val): + return f"EPOCH-{val}" + + +def main(): + print("Happy testing") + + +@pytest.mark.parametrize("epoch", EPOCHS_ID, indirect=True, ids=epoch_id) +def test_49_50_stake_nodes_failure_scenarios(blockchain, epoch): + # === PRE-CONDITIONS ============================================================== + assert True == is_chain_online() + AMOUNT_TO_MINT = "6000" + "000000000000000000" + + _A = Wallet(Path("./wallets/walletKey_1.pem")) + _B = Wallet(Path("./wallets/walletKey_2.pem")) + _C = Wallet(Path("./wallets/walletKey_3.pem")) + + # check if minting is successful + assert "success" in _A.set_balance(AMOUNT_TO_MINT) + assert "success" in _B.set_balance(AMOUNT_TO_MINT) + assert "success" in _C.set_balance(AMOUNT_TO_MINT) + + # add some blocks + response = add_blocks(5) + assert "success" in response + time.sleep(0.5) + + # check balance + assert _A.get_balance() == AMOUNT_TO_MINT + assert _B.get_balance() == AMOUNT_TO_MINT + assert _C.get_balance() == AMOUNT_TO_MINT + + # move to epoch + assert "success" in add_blocks_until_epoch_reached(epoch) + + # === STEP 1 ============================================================== + # 1) Test 49 : Stake a node with an invalid bls key + + # BEGIN ######################################################################### + # THIS CODE IS COMMENTED BECAUSE AFTER MULTIPLE TRIES IT IS NOT POSSIBLE TO TEST ON CHAIN SIMULATOR + # THIS SCENARIO BECAUSE SIGNATURES DOES NOT MATTER , WE LEFT THE CODE HERE FOR FUTURE TESTING ON TESTNETS + # invalid_key = ValidatorKey(Path("./validatorKeys/invalid_bls_key_1.pem")) + # normal_key = 
ValidatorKey(Path("./validatorKeys/validatorKey_1.pem")) + # + # # move few blocks and check tx , the py framework should fail the decoding because of the wrong bls key file + # # this way we test the framework too + # try: + # tx_hash = stake(_A, [invalid_key]) + # except Exception as error: + # print(error) + # assert "codec can't decode byte" in str(error) + # + # # send a malicious stake with wrong stake signature / public key / nr of nodes + # tx_hash = malicious_stake(_A, [normal_key], TX_DATA_MANIPULATOR=True) + # + # assert addBlocksUntilTxSucceeded(tx_hash) == "fail" + # END ############################################################################ + + # === STEP 2 ============================================================== + # 2) Test 49.1 : Stake a node with an already staked bls key + # stake a key for the first time + _key = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem")) + tx_hash = stake(_A, [_key]) + + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # make sure key is staked + if epoch == 3: + assert _key.get_status(_A.public_address()) == "queued" + else: + assert _key.get_status(_A.public_address()) == "staked" + + # stake same key again + tx_hash = stake(_B, [_key]) + + assert add_blocks_until_tx_fully_executed(tx_hash) == "fail" + + # check if it fails with the correct error message + assert check_if_error_is_present_in_tx("error bls key already registered", tx_hash) + + # === STEP 3 ============================================================== + # 3) Test 50 : Stake a node with less than 2500 egld + # send a malicious stake with less than 2500 egld + _key = ValidatorKey(Path("./validatorKeys/validatorKey_3.pem")) + tx_hash = malicious_stake(_C, [_key], AMOUNT_DEFICIT=1) + + assert add_blocks_until_tx_fully_executed(tx_hash) == "fail" + + # check if error message is present in tx + assert check_if_error_is_present_in_tx("insufficient stake value", tx_hash) + + # make sure all checks were done in needed epoch + assert proxy_default.get_network_status().epoch_number == epoch + # === FINISH =============================================================== + + +if __name__ == '__main__': + main() diff --git a/testing-suite/staking-v4/scenarios/test_68_69_merge_validator_to_sp_with_varying_node_counts.py b/testing-suite/staking-v4/scenarios/test_68_69_merge_validator_to_sp_with_varying_node_counts.py new file mode 100644 index 00000000..0260a7df --- /dev/null +++ b/testing-suite/staking-v4/scenarios/test_68_69_merge_validator_to_sp_with_varying_node_counts.py @@ -0,0 +1,236 @@ +# Config: +# -num-validators-per-shard 10 +# -num-waiting-validators-per-shard 6 +# -num-validators-meta 10 +# -num-waiting-validators-meta 6 +# max nr of nodes that a SP should have = 10% * total num validators (=40) = 4 + +# Steps : +# - We have Addresses A B C and D +# - 1) Stake 4 nodes with B in epoch 4 +# - 2) Stake 2 nodes with C in epoch 4 +# - 3) Stake 2 nodes with D in epoch 4 +# - 4) Create a delegation contract with A +# - 5) Merge C nodes in A's contract - should succeed +# - 6) Merge D nodes in A's contract - should succeed +# - 7) Merge B nodes in A's contract - should fail + +import delegation +import time +from config import * +from delegation import * +from chain_commander import * +from network_provider.get_delegation_info import get_delegation_contract_address_from_tx +from network_provider.get_transaction_info import check_if_error_is_present_in_tx +from staking import * +from delegation import * +from core.wallet import * +from core.validatorKey 
import * +import threading +from core.chain_simulator import * +import pytest + +EPOCHS_ID = [3, 4, 5, 6] + + +def epoch_id(val): + return f"EPOCH-{val}" + + +def main(): + print("Happy testing") + + +@pytest.mark.parametrize("epoch", EPOCHS_ID, indirect=True, ids=epoch_id) +def test_68_69_merge_validator_to_sp_with_varying_node_counts(blockchain, epoch): + # === PRE-CONDITIONS ============================================================== + assert True == is_chain_online() + # mint addresses + AMOUNT_TO_MINT = "50000" + "000000000000000000" + + _A = Wallet(Path("./wallets/walletKey_1.pem")) + _B = Wallet(Path("./wallets/walletKey_2.pem")) + _C = Wallet(Path("./wallets/walletKey_3.pem")) + _D = Wallet(Path("./wallets/walletKey_4.pem")) + + # check minting request will succeed + assert "success" in _A.set_balance(AMOUNT_TO_MINT) + assert "success" in _B.set_balance(AMOUNT_TO_MINT) + assert "success" in _C.set_balance(AMOUNT_TO_MINT) + assert "success" in _D.set_balance(AMOUNT_TO_MINT) + + # add some blocks + response = add_blocks(5) + assert "success" in response + time.sleep(0.5) + + # check balances + assert _A.get_balance() == AMOUNT_TO_MINT + assert _B.get_balance() == AMOUNT_TO_MINT + assert _C.get_balance() == AMOUNT_TO_MINT + assert _D.get_balance() == AMOUNT_TO_MINT + + # go to needed epoch + time.sleep(1) + response = add_blocks_until_epoch_reached(epoch) + assert "success" in response + + # === STEP 1 =============================================================== + # 1) Stake 4 nodes with B + VALIDATOR_KEY_1 = ValidatorKey(Path("./validatorKeys/validatorKey_1.pem")) + VALIDATOR_KEY_2 = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem")) + VALIDATOR_KEY_3 = ValidatorKey(Path("./validatorKeys/validatorKey_3.pem")) + VALIDATOR_KEY_4 = ValidatorKey(Path("./validatorKeys/validatorKey_4.pem")) + B_valid_keys_list = [VALIDATOR_KEY_1, VALIDATOR_KEY_2, VALIDATOR_KEY_3, VALIDATOR_KEY_4] + + # stake + tx_hash = stake(_B, B_valid_keys_list) + + # move on until tx is success + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # check bls keys statuses + for key in B_valid_keys_list: + if epoch == 3: + assert key.get_status(_B.public_address()) == "queued" + else: + assert key.get_status(_B.public_address()) == "staked" + + # check if owner is B + for key in B_valid_keys_list: + assert key.belongs_to(_B.public_address()) + + # === STEP 2 ================================================================ + # 2) Stake 2 nodes with C + VALIDATOR_KEY_5 = ValidatorKey(Path("./validatorKeys/validatorKey_5.pem")) + VALIDATOR_KEY_6 = ValidatorKey(Path("./validatorKeys/validatorKey_6.pem")) + C_valid_keys_list = [VALIDATOR_KEY_5, VALIDATOR_KEY_6] + + # stake + tx_hash = stake(_C, C_valid_keys_list) + + # move few blocks and check tx + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # check bls keys statuses + for key in C_valid_keys_list: + if epoch == 3: + assert key.get_status(_C.public_address()) == "queued" + else: + assert key.get_status(_C.public_address()) == "staked" + + # check if owner is C + for key in C_valid_keys_list: + assert key.belongs_to(_C.public_address()) + + # === STEP 3 ============================================================ + # 3) Stake 2 nodes with D + VALIDATOR_KEY_7 = ValidatorKey(Path("./validatorKeys/validatorKey_7.pem")) + VALIDATOR_KEY_8 = ValidatorKey(Path("./validatorKeys/validatorKey_8.pem")) + D_valid_keys_list = [VALIDATOR_KEY_7, VALIDATOR_KEY_8] + + # stake + tx_hash = stake(_D, D_valid_keys_list) + + # move few blocks 
and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # check bls keys statuses
+    for key in D_valid_keys_list:
+        if epoch == 3:
+            assert key.get_status(_D.public_address()) == "queued"
+        else:
+            assert key.get_status(_D.public_address()) == "staked"
+
+    # check if owner is D
+    for key in D_valid_keys_list:
+        assert key.belongs_to(_D.public_address())
+
+    # === STEP 4 ============================================================
+    # 4) Create a delegation contract with A
+
+    # create contract
+    tx_hash = create_new_delegation_contract(_A)
+
+    # move few blocks and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # get delegation contract address
+    DELEGATION_CONTRACT_ADDRESS = get_delegation_contract_address_from_tx(tx_hash)
+
+    # === STEP 5 ============================================================
+    # 5) Merge C nodes in A's contract - should succeed
+    # 5.1 - send a whitelist for merge from A to C
+    tx_hash = whitelist_for_merge(_A, _C, DELEGATION_CONTRACT_ADDRESS)
+
+    # move few blocks and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # 5.2 - send merging tx from C
+    tx_hash = merge_validator_to_delegation_with_whitelist(_C, DELEGATION_CONTRACT_ADDRESS)
+
+    # move few blocks and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # check if keys from C were transferred to A's contract
+    for key in C_valid_keys_list:
+        assert key.belongs_to(DELEGATION_CONTRACT_ADDRESS)
+
+    # check if keys are still staked
+    for key in C_valid_keys_list:
+        if epoch == 3:
+            assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "queued"
+        else:
+            assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "staked"
+
+    # === STEP 6 ==================================================
+    # 6) Merge D nodes in A's contract - should succeed
+    # 6.1 - send a whitelist for merge from A to D
+    tx_hash = whitelist_for_merge(_A, _D, DELEGATION_CONTRACT_ADDRESS)
+
+    # move few blocks and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # 6.2 - send merging tx from D
+    tx_hash = merge_validator_to_delegation_with_whitelist(_D, DELEGATION_CONTRACT_ADDRESS)
+
+    # move few blocks and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # check if keys from C were transferred to A's contract
+    for key in C_valid_keys_list:
+        assert key.belongs_to(DELEGATION_CONTRACT_ADDRESS)
+
+    # check if keys are still staked / queued
+    for key in C_valid_keys_list:
+        if epoch == 3:
+            assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "queued"
+        else:
+            assert key.get_status(DELEGATION_CONTRACT_ADDRESS) == "staked"
+
+    # === STEP 7 ===============================================================
+    # 7) Merge B nodes in A's contract - should fail
+    # 7.1 - send a whitelist for merge from A to B
+    tx_hash = whitelist_for_merge(_A, _B, DELEGATION_CONTRACT_ADDRESS)
+
+    # move few blocks and check tx
+    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+
+    # 7.2 - send merging tx from B
+    tx_hash = merge_validator_to_delegation_with_whitelist(_B, DELEGATION_CONTRACT_ADDRESS)
+
+    # move few blocks and check if the tx failed
+    if epoch == 3:
+        assert add_blocks_until_tx_fully_executed(tx_hash) == "success"
+    else:
+        assert add_blocks_until_tx_fully_executed(tx_hash) == "fail"
+        # check reason of failure
+        assert check_if_error_is_present_in_tx("number of nodes is too high", tx_hash)
+
+    # make sure all checks were done in needed epoch
+    assert 
proxy_default.get_network_status().epoch_number == epoch + # === FINISH =============================================================== + + +if __name__ == '__main__': + main() diff --git a/testing-suite/staking-v4/scenarios/test_70_75_validator_to_sp_transitions_and_validations.py b/testing-suite/staking-v4/scenarios/test_70_75_validator_to_sp_transitions_and_validations.py new file mode 100644 index 00000000..859cb4ed --- /dev/null +++ b/testing-suite/staking-v4/scenarios/test_70_75_validator_to_sp_transitions_and_validations.py @@ -0,0 +1,184 @@ +import time + +from core.wallet import Wallet +from core.validatorKey import ValidatorKey +from chain_commander import add_blocks, add_blocks_until_epoch_reached, add_blocks_until_tx_fully_executed, \ + is_chain_online +from network_provider.key_management import add_key +from pathlib import Path +from delegation import create_new_delegation_contract, merge_validator_to_delegation_same_owner, \ + merge_validator_to_delegation_with_whitelist, whitelist_for_merge +from network_provider.get_delegation_info import get_delegation_contract_address_from_tx +from delegation import add_nodes, stake_nodes +from staking import stake +import pytest + +# General: +# In this scenario we will test all possibilities of creating delegation contracts, and test that +# every contract will work properly (happy path) with user interactions: delegate, undelegate, claim, node merging etc. +# We will run the scenario starting in epochs 3, 4, 5 and 6. + +# Steps: +# 1) In epoch x create 6 users. Each user will create a delegation contract in a different way / different context. +# All users will create the contract with a fixed delegation cap of 10000 egld and a 10% service fee, using 2 keys. +# 1.1) User A - will use createNewDelegationContract function +# 1.2) User B - will use createNewDelegationContract function, but the second node will be added +# with mergeValidatorToDelegationSameOwner +# 1.3) User C - will use createNewDelegationContract function, but the second node will be added +# with whitelistForMerge +# 1.4) User D - will use makeNewContractFromValidatorData function +# 1.5) User F - will use makeNewContractFromValidatorData function, but the second node will be added +# with mergeValidatorToDelegationSameOwner +# 1.6) User G - will use makeNewContractFromValidatorData function, but the second node will be added +# with whitelistForMerge +# 2) In epoch x create 6 delegators that will delegate 1 egld to each user. After each delegation check that: +# 2.1) Balance of the delegator decreased by 1 egld + transaction fee +# 2.2) Check with getTotalActiveStake vm-query that the account has 1 egld staked. +# 2.3) Check with getTotalStaked vm-query that the SP contract stake has increased by 1 egld. +# 3) Create 1 temp delegator that will only test that the delegation cap of each contract cannot be exceeded +# 4) In epoch x+10 check rewards for each delegator; there should not be big differences between them. +# 5) In epoch x+10 redelegate rewards with all delegators. +# 6) In epoch x+20 check rewards for each delegator; there should not be big differences between them.
+# 7) In epoch x+20 claim rewards with all delegators +# 8) In epoch x+20 undelegate everything with all delegators +# 9) In epoch x+20 withdraw all with all delegators + +EPOCHS_ID = [3, 4, 5, 6] + + +def epoch_id(val): + return f"EPOCH-{val}" + + +def main(): + print("Happy testing") + + +@pytest.mark.parametrize("epoch", EPOCHS_ID, indirect=True, ids=epoch_id) +def test_70_75_validator_to_sp_transitions_and_validations(blockchain, epoch): + # === PRE-CONDITIONS ============================================================== + assert True == is_chain_online() + + # mint addresses + AMOUNT_TO_MINT = "6000" + "000000000000000000" + AMOUNT_FOR_SECONDARY_WALLET = "30000" + "000000000000000000" + + _A = Wallet(Path("./wallets/walletKey_1.pem")) + key_1 = ValidatorKey(Path("./validatorKeys/validatorKey_1.pem")) + key_2 = ValidatorKey(Path("./validatorKeys/validatorKey_2.pem")) + A_keys = [key_1, key_2] + + _B = Wallet(Path("./wallets/walletKey_2.pem")) + key_3 = ValidatorKey(Path("./validatorKeys/validatorKey_3.pem")) + key_4 = ValidatorKey(Path("./validatorKeys/validatorKey_4.pem")) + B_keys = [key_3, key_4] + + _C = Wallet(Path("./wallets/walletKey_3.pem")) + key_5 = ValidatorKey(Path("./validatorKeys/validatorKey_5.pem")) + key_6 = ValidatorKey(Path("./validatorKeys/validatorKey_6.pem")) + C_keys = [key_5, key_6] + + _D = Wallet(Path("./wallets/walletKey_4.pem")) + key_7 = ValidatorKey(Path("./validatorKeys/validatorKey_7.pem")) + key_8 = ValidatorKey(Path("./validatorKeys/validatorKey_8.pem")) + D_keys = [key_7, key_8] + + _E = Wallet(Path("./wallets/walletKey_5.pem")) + key_9 = ValidatorKey(Path("./validatorKeys/validatorKey_9.pem")) + key_10 = ValidatorKey(Path("./validatorKeys/validatorKey_10.pem")) + E_keys = [key_9, key_10] + + _F = Wallet(Path("./wallets/walletKey_6.pem")) + key_11 = ValidatorKey(Path("./validatorKeys/validatorKey_11.pem")) + key_12 = ValidatorKey(Path("./validatorKeys/validatorKey_12.pem")) + F_keys = [key_11, key_12] + + _G = Wallet(Path("./wallets/walletKey_7.pem")) + all_keys = A_keys + B_keys + C_keys + D_keys + E_keys + F_keys + + # check minting request will succeed + assert "success" in _A.set_balance(AMOUNT_TO_MINT) + assert "success" in _B.set_balance(AMOUNT_TO_MINT) + assert "success" in _C.set_balance(AMOUNT_TO_MINT) + assert "success" in _D.set_balance(AMOUNT_TO_MINT) + assert "success" in _E.set_balance(AMOUNT_TO_MINT) + assert "success" in _F.set_balance(AMOUNT_TO_MINT) + assert "success" in _G.set_balance(AMOUNT_FOR_SECONDARY_WALLET) + + # add some blocks + response = add_blocks(5) + assert "success" in response + + # check balances + assert _A.get_balance() == AMOUNT_TO_MINT + assert _B.get_balance() == AMOUNT_TO_MINT + assert _C.get_balance() == AMOUNT_TO_MINT + assert _D.get_balance() == AMOUNT_TO_MINT + assert _E.get_balance() == AMOUNT_TO_MINT + assert _F.get_balance() == AMOUNT_TO_MINT + assert _G.get_balance() == AMOUNT_FOR_SECONDARY_WALLET + + # add all keys to protocol so they will not be jailed + assert "success" in add_key(all_keys) + + # go to needed epoch + response = add_blocks_until_epoch_reached(epoch) + assert "success" in response + + # === STEP 1 =============================================================== + # 1) In epoch x create 6 users. Each user will create a delegation contract in a different way / different context. + # All users will create the contract with a fixed delegation cap of 10000 egld and a 10% service fee, using 2 keys.
+ # 1.1) User A - will use createNewDelegationContract function + + tx_hash = create_new_delegation_contract(_A, AMOUNT="5000000000000000000000", SERVICE_FEE="03e8", + DELEGATION_CAP="021e19e0c9bab2400000") + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + SP_address_for_A = get_delegation_contract_address_from_tx(tx_hash) + + tx_hash = add_nodes(_A, SP_address_for_A, A_keys) + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + tx_hash = stake_nodes(_A, SP_address_for_A, A_keys) + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # check if nodes are staked + for key in A_keys: + assert key.get_status(SP_address_for_A) == "staked" + + # 1.2) User B - will use createNewDelegationContract function, but the second node will be added + # with mergeValidatorToDelegationSameOwner + + tx_hash = create_new_delegation_contract(_B, AMOUNT="2500000000000000000000", SERVICE_FEE="03e8", + DELEGATION_CAP="021e19e0c9bab2400000") + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + SP_address_for_B = get_delegation_contract_address_from_tx(tx_hash) + + # add and stake_nodes only 1 key, the other will be added with mergeValidatorToDelegationSameOwner + tx_hash = add_nodes(_B, SP_address_for_B, [B_keys[0]]) + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + tx_hash = stake_nodes(_B, SP_address_for_B, [B_keys[0]]) + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # check if the node is staked + assert B_keys[0].get_status(SP_address_for_B) == "staked" + + # stake the other key, but not through the delegation contract + tx_hash = stake(_B, [B_keys[1]]) + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # check if the node is staked + assert B_keys[1].get_status(_B.public_address()) == "staked" + + # merge the normal staked key with the delegation contract + tx_hash = merge_validator_to_delegation_same_owner(_B, SP_address_for_B) + assert add_blocks_until_tx_fully_executed(tx_hash) == "success" + + # check if the new owner is now the delegation contract + assert B_keys[1].belongs_to(SP_address_for_B) + + +if __name__ == '__main__': + main() diff --git a/testing-suite/staking-v4/scenarios/PR_6114.py b/testing-suite/staking-v4/scenarios/test_PR_6114_key_auction_process_through_epochs.py similarity index 96% rename from testing-suite/staking-v4/scenarios/PR_6114.py rename to testing-suite/staking-v4/scenarios/test_PR_6114_key_auction_process_through_epochs.py index dd899927..00cf6388 100644 --- a/testing-suite/staking-v4/scenarios/PR_6114.py +++ b/testing-suite/staking-v4/scenarios/test_PR_6114_key_auction_process_through_epochs.py @@ -8,8 +8,9 @@ from staking import stake, unStake, unBondNodes from network_provider.get_validator_info import get_keys_state, get_keys_from_validator_statistics, \ get_keys_from_validator_auction -import requests +from network_provider.key_management import add_blocks_until_key_eligible, add_key +import requests # SCENARIO 1 # Have every epoch auction list with enough nodes (let's say 8 qualified, 2 unqualified) @@ -45,13 +46,18 @@ # 5.3) we will now have 9 keys in auction, 8 of them are qualified, 1 not qualified , 40 keys eligible, 16 waiting + +def epoch_id(val): + return f"EPOCH-{val}" + def main(): print("Happy testing") -def test_PR_6114(): +def test_PR_6114_key_auction_process_through_epochs(blockchain): # === PRE-CONDITIONS ============================================================== + assert True == is_chain_online() AMOUNT_TO_MINT = "10000" + 
"000000000000000000" wallet_a = Wallet(Path("./wallets/walletKey_1.pem")) diff --git a/testing-suite/staking-v4/scenarios/unHappyPaths_delegation.py b/testing-suite/staking-v4/scenarios/unHappyPaths_delegation.py new file mode 100644 index 00000000..4e2f38a3 --- /dev/null +++ b/testing-suite/staking-v4/scenarios/unHappyPaths_delegation.py @@ -0,0 +1,24 @@ + + +# General: +# here will be tested most of the cases that should fail regarding interaction with stakign providers + +# Steps: +# 1) Create a new delegation contract with "createNewDelegationContract" function with less than 1250 egld. +# 1.1) Tx should fail +# 1.2) Check that wallet balance is the same like before sending the tx - gass fees +# 1.3) Check te error message after sending the tx +# 2) Create a new delegation contract with 1250 egld with 0% fees and 4900 egld delegation cap - tx should pass +# 3) Add a new key to the contract +# 4) Stake a key to the contract that is not added +# 4.1) Tx should fail +# 4.2) Check error of the tx +# 5) Stake the key from point 3) +# 5.1) Tx should fail +# 5.2) Check error of tx +# 6) Delegate with another user 1250 egld to the contract +# 7) Stake the key from point 3) - now tx should pass + + + + diff --git a/testing-suite/staking-v4/staking.py b/testing-suite/staking-v4/staking.py index adc36b76..98db50a9 100644 --- a/testing-suite/staking-v4/staking.py +++ b/testing-suite/staking-v4/staking.py @@ -14,7 +14,6 @@ def stake(wallet: Wallet, validatorKeys: list[ValidatorKey]): - # nr of nodes staked nr_of_nodes_staked = len(validatorKeys) nr_of_nodes_staked = decimal_to_hex(nr_of_nodes_staked) @@ -54,6 +53,7 @@ def stake(wallet: Wallet, validatorKeys: list[ValidatorKey]): # send tx tx_hash = proxy_default.send_transaction(tx) + logger.info(f"Staking transaction sent, transaction hash: {tx_hash}") return tx_hash @@ -101,11 +101,12 @@ def malicious_stake(wallet: Wallet, validatorKeys: list[ValidatorKey], AMOUNT_DE # send tx tx_hash = proxy_default.send_transaction(tx) + + logger.info(f"Malicious staking transaction sent, transaction hash: {tx_hash}") return tx_hash def unStake(wallet: Wallet, validator_key: ValidatorKey) -> str: - # create transaction tx = Transaction(sender=wallet.get_address().to_bech32(), receiver=VALIDATOR_CONTRACT, @@ -126,11 +127,12 @@ def unStake(wallet: Wallet, validator_key: ValidatorKey) -> str: # send tx tx_hash = proxy_default.send_transaction(tx) - return tx_hash + logger.info(f"Unstaking transaction sent for key {validator_key.public_address()}, transaction hash: {tx_hash}") + return tx_hash -def unBondNodes(wallet : Wallet, validator_key: ValidatorKey) -> str: +def unBondNodes(wallet: Wallet, validator_key: ValidatorKey) -> str: # create transaction tx = Transaction(sender=wallet.get_address().to_bech32(), receiver=VALIDATOR_CONTRACT, @@ -151,5 +153,7 @@ def unBondNodes(wallet : Wallet, validator_key: ValidatorKey) -> str: # send tx tx_hash = proxy_default.send_transaction(tx) - return tx_hash + logger.info( + f"Un-bonding nodes transaction sent for key {validator_key.public_address()}, transaction hash: {tx_hash}") + return tx_hash diff --git a/testing-suite/staking-v4/utils/logger.py b/testing-suite/staking-v4/utils/logger.py new file mode 100644 index 00000000..fec8b733 --- /dev/null +++ b/testing-suite/staking-v4/utils/logger.py @@ -0,0 +1,12 @@ +import logging + +def get_logger(name): + logger = logging.getLogger(name) + if not logger.handlers: # Avoid adding multiple handlers to the same logger + logger.setLevel(logging.DEBUG) + ch = 
logging.StreamHandler() + ch.setFormatter(logging.Formatter('[%(asctime)s] - [%(levelname)s] - %(message)s')) + logger.addHandler(ch) + return logger + +logger = get_logger(__name__) \ No newline at end of file
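
The new test_70_75 file shown in this diff stops after sub-steps 1.1 and 1.2 of step 1. For reference only, here is a minimal sketch of how sub-step 1.3 (user C: createNewDelegationContract, with the second node merged in through whitelistForMerge) might continue inside the same test function, reusing only helpers that already appear in this diff (create_new_delegation_contract, add_nodes, stake_nodes, stake, whitelist_for_merge, merge_validator_to_delegation_with_whitelist, get_delegation_contract_address_from_tx, add_blocks_until_tx_fully_executed). The use of _G as the external wallet that stakes the second node before merging is an assumption and is not taken from the PR.

    # 1.3) User C - createNewDelegationContract, second node added with whitelistForMerge (sketch, not part of the PR)
    tx_hash = create_new_delegation_contract(_C, AMOUNT="2500000000000000000000", SERVICE_FEE="03e8",
                                             DELEGATION_CAP="021e19e0c9bab2400000")
    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"

    SP_address_for_C = get_delegation_contract_address_from_tx(tx_hash)

    # add and stake only the first key through the delegation contract
    tx_hash = add_nodes(_C, SP_address_for_C, [C_keys[0]])
    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"

    tx_hash = stake_nodes(_C, SP_address_for_C, [C_keys[0]])
    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"

    # stake the second key as a plain validator from the external wallet (assumed here to be _G)
    tx_hash = stake(_G, [C_keys[1]])
    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"

    # whitelist the external wallet for merging, then merge its node into C's contract
    tx_hash = whitelist_for_merge(_C, _G, SP_address_for_C)
    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"

    tx_hash = merge_validator_to_delegation_with_whitelist(_G, SP_address_for_C)
    assert add_blocks_until_tx_fully_executed(tx_hash) == "success"

    # both keys should now belong to C's delegation contract
    for key in C_keys:
        assert key.belongs_to(SP_address_for_C)

A same-owner variant of this flow would instead go through mergeValidatorToDelegationSameOwner, as in sub-step 1.2.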
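
The new unHappyPaths_delegation.py file currently contains only the scenario description. As a reference only, a minimal sketch of how step 1 (creating a delegation contract with less than 1250 egld) might be implemented with helpers used elsewhere in this suite; the import path for check_if_error_is_present_in_tx, the defaults assumed for get_balance, and the exact on-chain error message asserted at the end are assumptions, not taken from the PR.

    from pathlib import Path

    from core.wallet import Wallet
    from chain_commander import add_blocks, add_blocks_until_tx_fully_executed, is_chain_online
    from delegation import create_new_delegation_contract
    # import path assumed; reuse whatever module test_68_69 imports check_if_error_is_present_in_tx from
    from network_provider.get_transaction_info import check_if_error_is_present_in_tx


    def test_create_delegation_contract_with_insufficient_egld(blockchain):
        assert is_chain_online()

        owner = Wallet(Path("./wallets/walletKey_1.pem"))
        assert "success" in owner.set_balance("2000" + "000000000000000000")
        assert "success" in add_blocks(5)

        balance_before = owner.get_balance()

        # 1) create the contract with less than 1250 egld (here: 1000 egld)
        tx_hash = create_new_delegation_contract(owner, AMOUNT="1000" + "000000000000000000",
                                                 SERVICE_FEE="00", DELEGATION_CAP="021e19e0c9bab2400000")

        # 1.1) tx should fail
        assert add_blocks_until_tx_fully_executed(tx_hash) == "fail"

        # 1.2) balance should only have dropped by the gas fees, not by the 1000 egld
        # (assumes get_balance returns the balance as a decimal string)
        spent = int(balance_before) - int(owner.get_balance())
        assert spent < int("1000" + "000000000000000000")

        # 1.3) check the error message; the exact wording is an assumption
        assert check_if_error_is_present_in_tx("not enough stake", tx_hash)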