diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
deleted file mode 100644
index 24a790c..0000000
--- a/.github/workflows/backend.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-# TBA
-name: Backend
-
-on: [pull_request]
-
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-
-      - uses: actions/checkout@v2
-
-      # Node.js (for package scripts)
-      - uses: actions/setup-node@v4
-        with:
-          node-version: lts/*
-
-      # Python
-      - uses: actions/setup-python@v4
-        with:
-          python-version: "3.10"
-
-      - uses: snok/install-poetry@v1
-        with:
-          version: "1.7.1"
-          virtualenvs-create: true
-          virtualenvs-in-project: false
-          virtualenvs-path: ~/my-custom-path
-          installer-parallel: true
-
-      # Install backend dependencies
-      - run: yarn install:backend
-
-      # Run backend
-      # - run: yarn dev:backend
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 214c561..85d74e6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -54,3 +54,5 @@ local_config.json
 mech.db
 
 /.api_keys.json
+
+.metadata_hash.json
\ No newline at end of file
diff --git a/.metadata_hash.json.example b/.metadata_hash.json.example
new file mode 100644
index 0000000..9dd6bf6
--- /dev/null
+++ b/.metadata_hash.json.example
@@ -0,0 +1,47 @@
+{
+    "name": "Autonolas Mech Quickstart",
+    "description": "The mech executes AI tasks requested on-chain and delivers the results to the requester.",
+    "inputFormat": "ipfs-v0.1",
+    "outputFormat": "ipfs-v0.1",
+    "image": "https://gateway.autonolas.tech/ipfs/bafybeidzpenez565d7vp7jexfrwisa2wijzx6vwcffli57buznyyqkrceq",
+    "tools": [
+        "claude-prediction-offline"
+    ],
+    "toolMetadata": {
+        "claude-prediction-offline": {
+            "name": "Claude Prediction Offline",
+            "description": "Makes a prediction using Claude",
+            "input": {
+                "type": "text",
+                "description": "The text to make a prediction on"
+            },
+            "output": {
+                "type": "object",
+                "description": "A JSON object containing the prediction and confidence",
+                "schema": {
+                    "type": "object",
+                    "properties": {
+                        "requestId": {
+                            "type": "integer",
+                            "description": "Unique identifier for the request"
+                        },
+                        "result": {
+                            "type": "string",
+                            "description": "Result information in JSON format as a string",
+                            "example": "{\n \"p_yes\": 0.6,\n \"p_no\": 0.4,\n \"confidence\": 0.8,\n \"info_utility\": 0.6\n}"
+                        },
+                        "prompt": {
+                            "type": "string",
+                            "description": "The prompt used to make the prediction."
+                        }
+                    },
+                    "required": [
+                        "requestId",
+                        "result",
+                        "prompt"
+                    ]
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index 3ff0fe2..31144ed 100644
--- a/README.md
+++ b/README.md
@@ -37,6 +37,72 @@ Ensure your machine satisfies the requirements:
 
 - You need 1 RPC for your agent instance for Gnosis.
 
+## Setting up the Mech metadata hash file
+
+1. Copy over the sample from `.metadata_hash.json.example`. The example file is valid for a single tool.
+
+   ```
+   cp .metadata_hash.json.example .metadata_hash.json
+   ```
+
+2. Define your top-level key-value pairs
+
+   | Name         | Value Type | Description                               |
+   | :----------- | :--------: | :---------------------------------------- |
+   | name         |    str     | Name of your mech                         |
+   | description  |    str     | Description of your mech                  |
+   | inputFormat  |    str     | Can leave it default                      |
+   | outputFormat |    str     | Can leave it default                      |
+   | image        |    str     | Link to the image representing your mech  |
+   | tools        |    List    | List of AI tools your mech supports       |
+   | toolMetadata |    Dict    | Provides more info on specific tools      |
+
+> [!IMPORTANT] \
+> Each tool mentioned in `tools` should have a corresponding `key` in `toolMetadata`.
+
+3. Define your key-value pairs for each specific tool.
+
+   | Name        | Value Type | Description                             |
+   | :---------- | :--------: | :-------------------------------------- |
+   | name        |    str     | Name of the AI tool                     |
+   | description |    str     | Description of the AI tool              |
+   | input       |    Dict    | Contains the input schema of the tool   |
+   | output      |    Dict    | Contains the output schema of the tool  |
+
+4. Define your key-value pairs for the output schema
+
+   | Name       | Value Type | Description                                                   |
+   | :--------- | :--------: | :------------------------------------------------------------ |
+   | type       |    str     | Mentions the type of the schema                               |
+   | properties |    Dict    | Contains the required output data                             |
+   | required   |    List    | Contains the list of fields required in the `properties` key  |
+
+> [!IMPORTANT] \
+> Each field mentioned in `required` should have a corresponding `key` in `properties`.
+
+5. Define your key-value pairs for the properties field
+
+   | Name      | Value Type | Description                                                    |
+   | :-------- | :--------: | :------------------------------------------------------------- |
+   | requestId |    Dict    | Contains the request id and its description                    |
+   | result    |    Dict    | Contains the result and its description with an example        |
+   | prompt    |    Dict    | Contains the prompt used for the request and its description   |
+
+## Setting up the API keys file
+
+1. Copy over the sample from `.api_keys.json.example`.
+
+   ```
+   cp .api_keys.json.example .api_keys.json
+   ```
+
+2. Set up key-value pairs for every AI tool your mech uses
+
+   - The name of the tool will be the `key` used in the file
+   - The value will be an array of valid API keys the tool can use
+
 ## Run the Service
 
 1. Clone this repository:
@@ -53,9 +119,9 @@
    ```
 
 3. Run the quickstart:
-```bash
-python run_service.py
-```
+   ```bash
+   bash run_service.sh
+   ```
 
 When prompted, add the requested info, send funds to the prompted address and you're good to go!
 
@@ -71,14 +137,6 @@
 Please confirm your password:
 Creating the main wallet...
 ```
 
-## Staking
-
-The agent will need your answer on staking. If you plan to run it as a non staking agent, please answer _n_ to the question below. Otherwise, please answer _y_ and, consequently when prompted, fund your agent with the required number of bridged Olas in Optimism Chain.
-
-```bash
-Do you want to stake your service? (y/n):
-```
-
 ### Notes:
 - Staking is currently in a testing phase, so the number of trader agents that can be staked might be limited.
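Note: as a purely hypothetical illustration of the `.api_keys.json` scheme described in the README hunk above (the repository's `.api_keys.json.example` is the authoritative sample), a file for the single tool shipped in `.metadata_hash.json.example` could look like this — the key strings are placeholders:

```json
{
  "claude-prediction-offline": [
    "your-anthropic-api-key-1",
    "your-anthropic-api-key-2"
  ]
}
```

Each tool name maps to an array, so more than one valid API key can be provided per tool.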
diff --git a/operate/ledger/profiles.py b/operate/ledger/profiles.py
index 85788b3..5f9214e 100644
--- a/operate/ledger/profiles.py
+++ b/operate/ledger/profiles.py
@@ -84,9 +84,7 @@
         "optimus_alpha": "0x88996bbdE7f982D93214881756840cE2c77C4992",
     },
     ChainType.ETHEREUM: {},
-    ChainType.BASE: {
-        "meme_alpha": "0x06702A05312091013FdB50C8b60B98ca30762931"
-    },
+    ChainType.BASE: {"meme_alpha": "0x06702A05312091013FdB50C8b60B98ca30762931"},
     ChainType.CELO: {
         "meme_alpha": "0xf39cd0eE4C502Df7D26F28cFAdd579724A3CFCe8",
     },
diff --git a/operate/services/manage.py b/operate/services/manage.py
index 233ed73..e10ac03 100644
--- a/operate/services/manage.py
+++ b/operate/services/manage.py
@@ -1129,7 +1129,7 @@ def fund_service(  # pylint: disable=too-many-arguments,too-many-locals
         chain_id: str = "10",
     ) -> None:
         """Fund service if required."""
-        from_safe = False # hack
+        from_safe = False  # hack
         service = self.load_or_create(hash=hash)
         chain_config = service.chain_configs[chain_id]
         ledger_config = chain_config.ledger_config
@@ -1177,7 +1177,7 @@ def fund_service(  # pylint: disable=too-many-arguments,too-many-locals
             amount=int(to_transfer),
             chain_type=ledger_config.chain,
             rpc=rpc or ledger_config.rpc,
-            from_safe=False # hack
+            from_safe=False,  # hack
         )
 
     def fund_service_erc20(  # pylint: disable=too-many-arguments,too-many-locals
diff --git a/operate/services/service.py b/operate/services/service.py
index 3bb37af..fff37fa 100644
--- a/operate/services/service.py
+++ b/operate/services/service.py
@@ -228,6 +228,9 @@ def try_update_runtime_params(
             #     "on_chain_service_id"
             # ]
 
+            # service_id is unused; temporary workaround
+            print(f"{service_id=}")
+
             override["type"] = component_id.package_type.value
             override["public_id"] = str(component_id.public_id)
 
@@ -471,21 +474,6 @@ def _build_docker(
             (build / volume).mkdir(exist_ok=True)
             _volumes.append(f"./{volume}:{mount}:Z")
 
-        # not needed and causes issues
-        # for node in deployment["services"]:
-        #     if "abci" in node:
-        #         deployment["services"][node]["volumes"].extend(_volumes)
-        #         if (
-        #             "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE=0"
-        #             in deployment["services"][node]["environment"]
-        #         ):
-        #             deployment["services"][node]["environment"].remove(
-        #                 "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE=0"
-        #             )
-        #             deployment["services"][node]["environment"].append(
-        #                 "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE=10000000000000000"
-        #             )
-
         with (build / DOCKER_COMPOSE_YAML).open("w", encoding="utf-8") as stream:
             yaml_dump(data=deployment, stream=stream)
 
@@ -566,9 +554,9 @@ def _build_host(self, force: bool = True, chain_id: str = "100") -> None:
 
         # Mech price patch.
         agent_vars = json.loads(Path(build, "agent.json").read_text(encoding="utf-8"))
         if "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE" in agent_vars:
-            agent_vars["SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE"] = (
-                "10000000000000000"
-            )
+            agent_vars[
+                "SKILL_TRADER_ABCI_MODELS_PARAMS_ARGS_MECH_REQUEST_PRICE"
+            ] = "10000000000000000"
             Path(build, "agent.json").write_text(
                 json.dumps(agent_vars, indent=4),
                 encoding="utf-8",
@@ -806,9 +794,9 @@ def update_user_params_from_template(
     ) -> None:
         """Update user params from template."""
         for chain, config in service_template["configurations"].items():
-            self.chain_configs[chain].chain_data.user_params = (
-                OnChainUserParams.from_json(dict(config))
-            )
+            self.chain_configs[
+                chain
+            ].chain_data.user_params = OnChainUserParams.from_json(dict(config))
 
         self.store()
diff --git a/run_service.py b/run_service.py
index 8f54b33..30b0a62 100644
--- a/run_service.py
+++ b/run_service.py
@@ -312,7 +312,6 @@ def main() -> None:
         # TODO: no way to update this atm after its provided, user is expected to update the file itself.
         "API_KEYS": json.dumps(api_keys, separators=(',', ':')),
         "AGENT_ID": str(mech_quickstart_config.agent_id),
-        # TODO this will be very unclear for the general user how to come up with
         "METADATA_HASH": mech_quickstart_config.metadata_hash,
         "MECH_TO_CONFIG": json.dumps(mech_to_config, separators=(',', ':')),
         "ON_CHAIN_SERVICE_ID": service.chain_configs[home_chain_id].chain_data.token,
+ """ + + print_title("Mech Quickstart: Metadata hash setup") + + metadata_hash_path = input_with_default_value( + "Please provide the path to your metadata_hash.json file", + "./.metadata_hash.json", + ) + + status, error_msg = __validate_metadata_file(metadata_hash_path) + if not status: + print(error_msg) + print("Please refer to .metadata_hash.json.example for reference") + sys.exit(1) + + response = IPFSTool().client.add( + metadata_hash_path, pin=True, recursive=True, wrap_with_directory=False + ) + v1_file_hash = to_v1(response["Hash"]) + cid_bytes = multibase.decode(v1_file_hash) + multihash_bytes = multicodec.remove_prefix(cid_bytes) + v1_file_hash_hex = "f01" + multihash_bytes.hex() + + mech_quickstart_config.metadata_hash = v1_file_hash_hex + + print_title("Metadata hash successfully generated and stored in config") + + +def __validate_metadata_file(file_path) -> Tuple[bool, str]: + status = False + try: + path = file_path + with open(path, "r") as f: + metadata: Dict = json.load(f) + + except FileNotFoundError: + return (status, f"Error: Metadata file not found at {file_path}") + except json.JSONDecodeError: + return (status, "Error: Metadata file contains invalid JSON.") + + for key, expected_type in metadata_schema.items(): + if key not in metadata: + return (status, f"Missing key in metadata json: '{key}'") + + if not isinstance(metadata[key], expected_type): + expected = expected_type.__name__ + actual = type(metadata[key]).__name__ + return ( + status, + f"Invalid type for key in metadata json. Expected '{expected}', but got '{actual}'", + ) + + tools = metadata["tools"] + tools_metadata = metadata["toolMetadata"] + num_of_tools = len(tools) + num_of_tools_metadata = len(tools_metadata) + + if num_of_tools != num_of_tools_metadata: + return ( + status, + f"Number of tools does not match number of keys in 'toolMetadata'. Expected {num_of_tools} but got {num_of_tools_metadata}.", + ) + + for tool in tools: + if tool not in tools_metadata: + return (status, f"Missing toolsMetadata for tool: '{tool}'") + + for key, expected_type in tool_schema.items(): + data = tools_metadata[tool] + if key not in data: + return (status, f"Missing key in toolsMetadata: '{key}'") + + if not isinstance(data[key], expected_type): + expected = expected_type.__name__ + actual = type(data[key]).__name__ + return ( + status, + f"Invalid type for key in toolsMetadata. Expected '{expected}', but got '{actual}'", + ) + + if key == "input": + for i_key, i_expected_type in tool_input_schema.items(): + input_data = data[key] + if i_key not in input_data: + return ( + status, + f"Missing key for {tool} -> input: '{i_key}'", + ) + + if not isinstance(input_data[i_key], i_expected_type): + i_expected = i_expected_type.__name__ + i_actual = type(input_data[i_key]).__name__ + return ( + status, + f"Invalid type for '{i_key}' in {tool} -> input. Expected '{i_expected}', but got '{i_actual}'.", + ) + + elif key == "output": + for o_key, o_expected_type in tool_output_schema.items(): + output_data = data[key] + if o_key not in output_data: + return ( + status, + f"Missing key for {tool} -> output: '{o_key}'", + ) + + if not isinstance(output_data[o_key], o_expected_type): + o_expected = o_expected_type.__name__ + o_actual = type(output_data[o_key]).__name__ + return ( + status, + f"Invalid type for '{o_key}' in {tool} -> output. 
diff --git a/utils.py b/utils.py
index aeb9449..ed4d2ba 100644
--- a/utils.py
+++ b/utils.py
@@ -39,10 +39,8 @@
 WARNING_ICON = colored("\u26A0", "yellow")
 OPERATE_HOME = Path.cwd() / ".mech_quickstart"
 DEFAULT_TOOLS_TO_PACKAGE_HASH = None
-DEFAULT_MECH_TO_SUBSCRIPTION = None
-DEFAULT_MECH_TO_CONFIG = None
 DEFAULT_MECH_HASH = "bafybeig544gw6i7ahlwj6d64djlwfltjuznz3p66kmwk4m6bzqtn2bjfbq"
-
+DEFAULT_MECH_METADATA_HASH = "f01701220caa53607238e340da63b296acab232c18a48e954f0af6ff2b835b2d93f1962f0"
 @dataclass
 class MechQuickstartConfig(LocalResource):
     """Local configuration."""
@@ -348,8 +346,18 @@ def get_local_config() -> MechQuickstartConfig:
     load_api_keys(mech_quickstart_config)
 
     if mech_quickstart_config.metadata_hash is None:
-        # TODO: default value is not a good idea here, we need to think of better ways to do this.
-        mech_quickstart_config.metadata_hash = input_with_default_value("Please provide the metadata hash", "f01701220caa53607238e340da63b296acab232c18a48e954f0af6ff2b835b2d93f1962f0")
+        metadata_hash = (
+            input(
+                f"Do you want to update the metadata hash (currently set to {DEFAULT_MECH_METADATA_HASH})? (y/n): "
+            ).lower()
+            == "y"
+        )
+        if metadata_hash:
+            from setup_metadata_hash import setup_metadata_hash
+
+            setup_metadata_hash(mech_quickstart_config)
+        else:
+            mech_quickstart_config.metadata_hash = DEFAULT_MECH_METADATA_HASH
 
     if mech_quickstart_config.tools_to_packages_hash is None:
         tools_to_packages_hash = (
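Note: as a companion to the full validator in `setup_metadata_hash.py`, here is a minimal, hypothetical sketch (not part of the diff) of the two cross-reference rules the README calls out — every entry in `tools` needs a key in `toolMetadata`, and every field in an output schema's `required` list needs a matching key under `properties`:

```python
import json


def check_metadata_consistency(path: str = "./.metadata_hash.json") -> list:
    """Return a list of consistency errors; an empty list means the checks pass."""
    with open(path, "r", encoding="utf-8") as f:
        metadata = json.load(f)

    errors = []
    tool_metadata = metadata.get("toolMetadata", {})

    # Rule 1: every entry in "tools" must have a corresponding key in "toolMetadata".
    for tool in metadata.get("tools", []):
        if tool not in tool_metadata:
            errors.append(f"Missing toolMetadata entry for tool: '{tool}'")

    # Rule 2: every field in an output schema's "required" list must appear
    # under that schema's "properties".
    for tool, data in tool_metadata.items():
        schema = data.get("output", {}).get("schema", {})
        properties = schema.get("properties", {})
        for field in schema.get("required", []):
            if field not in properties:
                errors.append(
                    f"Field '{field}' is required but missing from 'properties' of '{tool}'"
                )
    return errors


if __name__ == "__main__":
    print(check_metadata_consistency() or "Metadata file looks consistent.")
```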