From a2762017388ef4a298fbdb772418e58b88766a8b Mon Sep 17 00:00:00 2001
From: "Frank V. Castellucci"
Date: Sat, 21 Dec 2024 15:56:15 -0500
Subject: [PATCH] Release 0.74.0

---
 doc/source/graphql_qnodes.rst              |  4 +-
 doc/source/graphql_serial_exc.rst          | 57 +++++++++++++++++++
 doc/source/index.rst                       |  1 +
 pysui/sui/sui_pgql/execute/caching_exec.py |  5 +-
 .../sui_pgql/execute/caching_tx_builder.py | 27 ++++-----
 pysui/sui/sui_pgql/execute/caching_txn.py  |  4 +-
 pysui/sui/sui_pgql/execute/serial_exec.py  | 33 +++++++----
 7 files changed, 100 insertions(+), 31 deletions(-)
 create mode 100644 doc/source/graphql_serial_exc.rst

diff --git a/doc/source/graphql_qnodes.rst b/doc/source/graphql_qnodes.rst
index 0bd2be5..65abab8 100644
--- a/doc/source/graphql_qnodes.rst
+++ b/doc/source/graphql_qnodes.rst
@@ -1,6 +1,6 @@
-QueryNodes
-==========
+GraphQL QueryNodes
+===================
 
 General
 -------
diff --git a/doc/source/graphql_serial_exc.rst b/doc/source/graphql_serial_exc.rst
new file mode 100644
index 0000000..4907a92
--- /dev/null
+++ b/doc/source/graphql_serial_exc.rst
@@ -0,0 +1,57 @@
+
+GraphQL SerialTransactionExecutor
+=================================
+
+General
+-------
+
+The SerialTransactionExecutor is implemented along with GraphQL support. It is highly efficient when creating transactions
+signed by the same address, where the objects owned by that address are unlikely to be
+changed by transactions not executed through this executor.
+
+The executor is backed by an object cache that is initially populated by resolving the first transaction's objects. Those objects
+are updated (digest, version) from the transaction results. Subsequent transactions' objects are then resolved from the cache, as is
+the gas object used to pay for the transaction.
+
+Note that all available gas objects will be smashed into one gas object during transaction build processing.
+
+Here is a simple example:
+
+.. code-block:: python
+    :linenos:
+
+    async def test_splits():
+        """."""
+        client = AsyncGqlClient(
+            write_schema=False,
+            pysui_config=PysuiConfiguration(
+                group_name=PysuiConfiguration.SUI_GQL_RPC_GROUP
+            ),
+        )
+        ser_txne = SerialTransactionExecutor(
+            client=client, signer=client.config.active_address, default_gas_budget=5000000
+        )
+        # Tx 1
+        ser_txn_1: CachingTransaction = await ser_txne.new_transaction()
+        scoin = await ser_txn_1.split_coin(coin=ser_txn_1.gas, amounts=[1000000000])
+        await ser_txn_1.transfer_objects(
+            transfers=[scoin], recipient=client.config.active_address
+        )
+
+        # Tx 2
+        ser_txn_2: CachingTransaction = await ser_txne.new_transaction()
+        scoin = await ser_txn_2.split_coin(coin=ser_txn_2.gas, amounts=[1000000000])
+        await ser_txn_2.transfer_objects(
+            transfers=[scoin], recipient=client.config.active_address
+        )
+        gx = await ser_txne.execute_transactions([ser_txn_1, ser_txn_2])
+        for gres in gx:
+            print(gres.to_json(indent=2))
+
+In this example the two transaction times are, respectively:
+
+.. code-block:: shell
+    :linenos:
+
+    serial_exec DEBUG tx execution 3.573889970779419
+    serial_exec DEBUG tx execution 0.1877460479736328
diff --git a/doc/source/index.rst b/doc/source/index.rst
index ffd5d2f..043d786 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -26,6 +26,7 @@ Welcome to pysui's documentation!
    graphql_prog_txn
    graphql_pyconfig
    graphql_qnodes
+   graphql_serial_exc
    aliases
    multi_sig
    prog_txn
diff --git a/pysui/sui/sui_pgql/execute/caching_exec.py b/pysui/sui/sui_pgql/execute/caching_exec.py
index df1dcce..90a282d 100644
--- a/pysui/sui/sui_pgql/execute/caching_exec.py
+++ b/pysui/sui/sui_pgql/execute/caching_exec.py
@@ -18,8 +18,9 @@
 from .cache import AsyncObjectCache, ObjectCacheEntry
 from .caching_txn import CachingTransaction
 
-logger = logging.getLogger("async_executor")
-logger.setLevel(logging.DEBUG)
+logger = logging.getLogger("serial_exec")
+# logger = logging.getLogger("async_executor")
+# logger.setLevel(logging.DEBUG)
 
 
 class AsyncCachingTransactionExecutor:
diff --git a/pysui/sui/sui_pgql/execute/caching_tx_builder.py b/pysui/sui/sui_pgql/execute/caching_tx_builder.py
index a079b4e..5d1daec 100644
--- a/pysui/sui/sui_pgql/execute/caching_tx_builder.py
+++ b/pysui/sui/sui_pgql/execute/caching_tx_builder.py
@@ -21,10 +21,7 @@
 _SUI_PACAKGE_COMMIt_UPGRADE: str = "commit_upgrade"
 
 # Standard library logging setup
-logger = logging.getLogger()
-# if not logging.getLogger().handlers:
-#     logger.addHandler(logging.NullHandler())
-# logger.propagate = True
+logger = logging.getLogger("serial_exec")
 
 
 @versionadded(version="0.73.0", reason="Support serialzed and parallel executions")
@@ -71,7 +68,7 @@ def resolved_object_inputs(
         """."""
         new_inputs: dict[bcs.BuilderArg, bcs.CallArg] = {}
         for idx, (barg, carg) in enumerate(self.inputs.items()):
-            if barg.enum_name is "Unresolved":
+            if barg.enum_name == "Unresolved":
                 rbarg, rcarg = entries[idx]
                 new_inputs[rbarg] = rcarg
             else:
@@ -208,7 +205,7 @@ def make_move_vector(
     ) -> bcs.Argument:
         """Create a call to convert a list of items to a Sui 'vector' type."""
         # Sample first for type
-        logger.debug("Creating MakeMoveVec transaction")
+        logger.debug("MakeMoveVec transaction")
         argrefs: list[bcs.Argument] = []
         for arg in items:
             if isinstance(arg, bcs.BuilderArg):
@@ -247,7 +244,7 @@ def move_call(
         res_count: int = 1,
     ) -> Union[bcs.Argument, list[bcs.Argument]]:
         """Setup a MoveCall command and return it's result Argument."""
-        logger.debug("Creating MakeCall transaction")
+        logger.debug("MoveCall transaction")
         argrefs: list[bcs.Argument] = []
         for arg in arguments:
             if isinstance(arg, bcs.BuilderArg):
@@ -293,7 +290,7 @@ def split_coin(
         amounts: list[bcs.BuilderArg],
     ) -> bcs.Argument:
         """Setup a SplitCoin command and return it's result Argument."""
-        logger.debug("Creating SplitCoin transaction")
+        logger.debug("SplitCoin transaction")
         amounts_arg = []
         for amount in amounts:
             if isinstance(amount, bcs.Argument):
@@ -333,7 +330,7 @@ def merge_coins(
         ],
     ) -> bcs.Argument:
         """Setup a MergeCoins command and return it's result Argument."""
-        logger.debug("Creating MergeCoins transaction")
+        logger.debug("MergeCoins transaction")
         if isinstance(to_coin, bcs.UnresolvedObjectArg):
             to_coin = self.input_obj_from_unresolved_object(to_coin)
 
@@ -377,7 +374,7 @@ def transfer_objects(
         ],
     ) -> bcs.Argument:
         """Setup a TransferObjects command and return it's result Argument."""
-        logger.info("Creating TransferObjects transaction")
+        logger.info("TransferObjects transaction")
         receiver_arg = (
             recipient
             if isinstance(recipient, bcs.Argument)
@@ -416,7 +413,7 @@ def transfer_sui(
         First uses the SplitCoins result, then returns the TransferObjects result
         Argument.
""" - logger.debug("Creating TransferSui transaction") + logger.debug("TransferSui transaction") reciever_arg = self.input_pure(recipient) if amount and isinstance(amount, bcs.BuilderArg): coin_arg = self.split_coin(from_coin=from_coin, amounts=[amount]) @@ -448,7 +445,7 @@ def publish( self, modules: list[list[bcs.U8]], dep_ids: list[bcs.Address] ) -> bcs.Argument: """Setup a Publish command and return it's result Argument.""" - logger.debug("Creating Publish transaction") + logger.debug("Publish transaction") # result = self.command(bcs.Command("Publish", bcs.Publish(modules, dep_ids))) return self.command(bcs.Command("Publish", bcs.Publish(modules, dep_ids))) @@ -459,7 +456,7 @@ def authorize_upgrade( digest: bcs.BuilderArg, ) -> bcs.Argument: """Setup a Authorize Upgrade MoveCall and return it's result Argument.""" - logger.debug("Creating UpgradeAuthorization transaction") + logger.debug("UpgradeAuthorization transaction") if isinstance(upgrade_cap, bcs.ObjectArg): ucap = self.input_obj_from_objarg(upgrade_cap) else: @@ -489,7 +486,7 @@ def publish_upgrade( upgrade_ticket: bcs.Argument, ) -> bcs.Argument: """Setup a Upgrade Command and return it's result Argument.""" - logger.debug("Creating PublishUpgrade transaction") + logger.debug("PublishUpgrade transaction") return self.command( bcs.Command( "Upgrade", @@ -501,7 +498,7 @@ def commit_upgrade( self, upgrade_cap: bcs.Argument, receipt: bcs.Argument ) -> bcs.Argument: """Setup a Commit Upgrade MoveCall and return it's result Argument.""" - logger.debug("Creating UpgradeCommit transaction") + logger.debug("UpgradeCommit transaction") return self.command( bcs.Command( "MoveCall", diff --git a/pysui/sui/sui_pgql/execute/caching_txn.py b/pysui/sui/sui_pgql/execute/caching_txn.py index 5dd4dff..598900f 100644 --- a/pysui/sui/sui_pgql/execute/caching_txn.py +++ b/pysui/sui/sui_pgql/execute/caching_txn.py @@ -6,6 +6,7 @@ """Pysui Transaction builder that leverages Sui GraphQL.""" import base64 +import logging from typing import Any, Coroutine, Optional, Union from deprecated.sphinx import versionchanged from pysui.sui.sui_pgql.pgql_clients import AsyncSuiGQLClient @@ -21,7 +22,8 @@ from .caching_tx_builder import CachingTransactionBuilder, PureInput from pysui.sui.sui_common.async_lru import AsyncLRU -# Well known parameter constructs +logger = logging.getLogger("serial_exec") +# logger = logging.getLogger("caching transaction") class CachingTransaction(txbase): diff --git a/pysui/sui/sui_pgql/execute/serial_exec.py b/pysui/sui/sui_pgql/execute/serial_exec.py index ac14c6c..7d99270 100644 --- a/pysui/sui/sui_pgql/execute/serial_exec.py +++ b/pysui/sui/sui_pgql/execute/serial_exec.py @@ -11,6 +11,9 @@ import time from typing import Any, Coroutine, Optional, Union +logger = logging.getLogger("serial_exec") +logger.setLevel(logging.DEBUG) + from pysui import AsyncGqlClient from pysui.sui.sui_pgql.pgql_txb_signing import SigningMultiSig import pysui.sui.sui_pgql.pgql_types as ptypes @@ -20,9 +23,6 @@ from .caching_txn import CachingTransaction -logger = logging.getLogger("serial_exec") -logger.setLevel(logging.DEBUG) - def _get_gascoin_from_effects(effects: bcst.TransactionEffects) -> bcs.ObjectReference: """.""" @@ -122,15 +122,25 @@ async def _build_transaction(self, transaction: CachingTransaction) -> str: async def execute_transactions( self, - transactions: list[Union[str, CachingTransaction]], + transactions: list[CachingTransaction], **kwargs, ) -> list[ptypes.ExecutionResultGQL]: - """.""" + """Serially execute one or more transactions 
+
+        :param transactions: The transactions to execute
+        :type transactions: list[CachingTransaction]
+        :raises ValueError: If the signer is a BaseMultiSig, which cannot sign for execution
+        :raises ValueError: If execution of a transaction fails
+        :return: The transaction execution results
+        :rtype: list[ptypes.ExecutionResultGQL]
+        """
         exe_res: list[ptypes.ExecutionResultGQL] = []
         for tx in transactions:
-            start_time = time.time()
-            tx_str = tx if isinstance(tx, str) else await self._build_transaction(tx)
-            logger.debug("Getting signature")
+            if logger.getEffectiveLevel() == logging.DEBUG:
+                start_time = time.time()
+            logger.debug("Building transaction")
+            tx_str = await self._build_transaction(tx)
+            logger.debug(f"Signing {tx_str}")
             # Sign the transaction
             sig_list: list[str] = []
             if isinstance(self._signer, str):
@@ -148,15 +158,16 @@
                 raise ValueError("BaseMultiSig can not sign for execution")
             try:
                 # TODO: Clean up using legacy pysui Signature type
-                logger.debug("Calling cache execute transaction")
+                logger.debug("Cache transaction execution")
                 results: ptypes.ExecutionResultGQL = (
                     await self._cache.execute_transaction(
                         tx_str, [x.value for x in sig_list], **kwargs
                     )
                 )
                 await self.apply_effects(results.bcs)
-                end_time = time.time()
-                logger.debug(f"tx execution {end_time-start_time}")
+                if logger.getEffectiveLevel() == logging.DEBUG:
+                    end_time = time.time()
+                    logger.debug(f"tx execution {end_time-start_time}")
                 exe_res.append(results)
             except ValueError as exc:
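
The ``tx execution`` timing lines shown in the new documentation come from the ``serial_exec`` logger, and the executor samples ``time.time()`` only when that logger's effective level is DEBUG (which serial_exec.py sets at import in this release). A minimal sketch of surfacing those lines in an application, assuming only the standard library ``logging`` module:

.. code-block:: python

    import logging

    # Attach a root handler whose format matches the sample output above:
    # "<logger name> <level> <message>".
    logging.basicConfig(format="%(name)s %(levelname)s %(message)s")

    # Keep the executor's logger at DEBUG explicitly; serial_exec.py already
    # does this at import, so this line is a safeguard rather than a requirement.
    logging.getLogger("serial_exec").setLevel(logging.DEBUG)

Raising the ``serial_exec`` level above DEBUG silences the timing lines and skips the ``time.time()`` calls guarded by ``getEffectiveLevel()``.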