diff --git a/api.md b/api.md
index c3699280d..6a7834b36 100644
--- a/api.md
+++ b/api.md
@@ -1,58 +1,428 @@
-```ts
-ServiceHash: string
+`GET /api`
-ServiceUpdateTemplate {
- old: ServiceHash;
- new: ServiceTemplate;
+Returns information about the operate daemon
+
+
+ Response
+
+```json
+{
+ "name": "Operate HTTP server",
+ "version": "0.1.0.rc0",
+ "account": {
+ "key": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb9226a"
+ },
+ "home": "/Users/virajpatel/valory/olas-operate-app/.operate"
+}
+```
+
+
+
+---
+`GET /api/services`
+
+Returns the list of services
+
+
+ Response
+
+```json
+[
+ {
+ "hash": "bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a",
+ "keys": [
+ {
+ "ledger": 0,
+ "address": "0x6Db941e0e82feA3c02Ba83B20e3fB5Ea6ee539cf",
+ "private_key": "0x0000000000000000000000000000000000000000000000000000000000000000"
+ }
+ ],
+ "ledger_config": {
+ "rpc": "http://localhost:8545",
+ "type": 0,
+ "chain": 2
+ },
+ "chain_data": {
+ "instances": [
+ "0x6Db941e0e82feA3c02Ba83B20e3fB5Ea6ee539cf"
+ ],
+ "token": 380,
+ "multisig": "0x7F3e460Cf596E783ca490791643C0055Fa2034AC",
+ "staked": false,
+ "on_chain_state": 6,
+ "user_params": {
+ "nft": "bafybeig64atqaladigoc3ds4arltdu63wkdrk3gesjfvnfdmz35amv7faq",
+ "agent_id": 14,
+ "threshold": 1,
+ "use_staking": false,
+ "cost_of_bond": 10000000000000000,
+ "olas_cost_of_bond": 10000000000000000000,
+ "olas_required_to_stake": 10000000000000000000,
+ "fund_requirements": {
+ "agent": 0.1,
+ "safe": 0.5
+ }
+ }
+ },
+ "path": "/Users/virajpatel/valory/olas-operate-app/.operate/services/bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a",
+ "service_path": "/Users/virajpatel/valory/olas-operate-app/.operate/services/bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a/trader_omen_gnosis",
+ "name": "valory/trader_omen_gnosis"
+ }
+]
+```
+
+
+
+---
+`POST /api/services`
+
+Create a service using the service template
+
+
+ Request
+
+```json
+{
+ "name": "Trader Agent",
+ "description": "Trader agent for omen prediction markets",
+ "hash": "bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a",
+ "image": "https://operate.olas.network/_next/image?url=%2Fimages%2Fprediction-agent.png&w=3840&q=75",
+ "configuration": {
+ "nft": "bafybeig64atqaladigoc3ds4arltdu63wkdrk3gesjfvnfdmz35amv7faq",
+ "rpc": "http://localhost:8545",
+ "agent_id": 14,
+ "threshold": 1,
+ "use_staking": false,
+ "cost_of_bond": 10000000000000000,
+ "olas_cost_of_bond": 10000000000000000000,
+ "olas_required_to_stake": 10000000000000000000,
+ "fund_requirements": {
+ "agent": 0.1,
+ "safe": 0.5
+ }
+ }
+}
+```
+
+
+
+Optionally you can add `deploy` parameter and set it to `true` for a full deployment in a single request.
+
+
+ Response
+
+```json
+{
+ "hash": "bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a",
+ "keys": [
+ {
+ "ledger": 0,
+ "address": "0x10EB940024913dfCAE95D21E04Ba662cdfB79fF0",
+ "private_key": "0x00000000000000000000000000000000000000000000000000000000000000000"
+ }
+ ],
+ "ledger_config": {
+ "rpc": "http://localhost:8545",
+ "type": 0,
+ "chain": 2
+ },
+ "chain_data": {
+ "instances": [
+ "0x10EB940024913dfCAE95D21E04Ba662cdfB79fF0"
+ ],
+ "token": 382,
+ "multisig": "0xf21d8A424e83BBa2588306D1C574FE695AD410b5",
+ "staked": false,
+ "on_chain_state": 4,
+ "user_params": {
+ "nft": "bafybeig64atqaladigoc3ds4arltdu63wkdrk3gesjfvnfdmz35amv7faq",
+ "agent_id": 14,
+ "threshold": 1,
+ "use_staking": false,
+ "cost_of_bond": 10000000000000000,
+ "olas_cost_of_bond": 10000000000000000000,
+ "olas_required_to_stake": 10000000000000000000,
+ "fund_requirements": {
+ "agent": 0.1,
+ "safe": 0.5
+ }
+ }
+ },
+ "path": "~/.operate/services/bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a",
+ "service_path": "~/.operate/services/bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a/trader_omen_gnosis",
+ "name": "valory/trader_omen_gnosis"
+}
+```
+
+
+
+---
+`PUT /api/services`
+
+Update a service
+
+
+
+ Request
+
+```json
+{
+ "old_service_hash": "bafybeieagxzdbmea3nttlve3yxjne5z7tt7mp26tfpgepm7p2ezovtdx4a",
+ "new_service_hash": "bafybeicxdpkuk5z5zfbkso7v5pywf4v7chxvluyht7dtgalg6dnhl7ejoe"
+}
+```
+
+
+
+Optionally you can add `deploy` parameter and set it to `true` for a full deployment in a single request.
+
+
+ Response
+
+```json
+{
+ "hash": "bafybeicxdpkuk5z5zfbkso7v5pywf4v7chxvluyht7dtgalg6dnhl7ejoe",
+ "keys": [
+ {
+ "ledger": 0,
+ "address": "0x10EB940024913dfCAE95D21E04Ba662cdfB79fF0",
+ "private_key": "0x00000000000000000000000000000000000000000000000000000000000000000"
+ }
+ ],
+ "ledger_config": {
+ "rpc": "http://localhost:8545",
+ "type": 0,
+ "chain": 2
+ },
+ "chain_data": {
+ "instances": [
+ "0x10EB940024913dfCAE95D21E04Ba662cdfB79fF0"
+ ],
+ "token": 382,
+ "multisig": "0xf21d8A424e83BBa2588306D1C574FE695AD410b5",
+ "staked": false,
+ "on_chain_state": 4,
+ "user_params": {
+ "nft": "bafybeig64atqaladigoc3ds4arltdu63wkdrk3gesjfvnfdmz35amv7faq",
+ "agent_id": 14,
+ "threshold": 1,
+ "use_staking": false,
+ "cost_of_bond": 10000000000000000,
+ "olas_cost_of_bond": 10000000000000000000,
+ "olas_required_to_stake": 10000000000000000000,
+ "fund_requirements": {
+ "agent": 0.1,
+ "safe": 0.5
+ }
+ }
+ },
+ "path": "~/.operate/services/bafybeicxdpkuk5z5zfbkso7v5pywf4v7chxvluyht7dtgalg6dnhl7ejoe",
+ "service_path": "~/.operate/services/bafybeicxdpkuk5z5zfbkso7v5pywf4v7chxvluyht7dtgalg6dnhl7ejoe/trader_omen_gnosis",
+ "name": "valory/trader_omen_gnosis"
+}
+```
+
+
+
+---
+`GET /api/services/{service}`
+
+
+ Response
+
+```json
+{
+ "hash": "{service}",
+ "keys": [
+ {
+ "ledger": 0,
+ "address": "0x10EB940024913dfCAE95D21E04Ba662cdfB79fF0",
+ "private_key": "0x00000000000000000000000000000000000000000000000000000000000000000"
+ }
+ ],
+ "ledger_config": {
+ "rpc": "http://localhost:8545",
+ "type": 0,
+ "chain": 2
+ },
+ "chain_data": {
+ "instances": [
+ "0x10EB940024913dfCAE95D21E04Ba662cdfB79fF0"
+ ],
+ "token": 382,
+ "multisig": "0xf21d8A424e83BBa2588306D1C574FE695AD410b5",
+ "staked": false,
+ "on_chain_state": 4,
+ "user_params": {
+ "nft": "bafybeig64atqaladigoc3ds4arltdu63wkdrk3gesjfvnfdmz35amv7faq",
+ "agent_id": 14,
+ "threshold": 1,
+ "use_staking": false,
+ "cost_of_bond": 10000000000000000,
+ "olas_cost_of_bond": 10000000000000000000,
+ "olas_required_to_stake": 10000000000000000000,
+ "fund_requirements": {
+ "agent": 0.1,
+ "safe": 0.5
+ }
+ }
+ },
+ "path": "~/.operate/services/{service}",
+ "service_path": "~/.operate/services/{service}/trader_omen_gnosis",
+ "name": "valory/trader_omen_gnosis"
+}
+```
+
+
+
+---
+`GET /api/services/{service}/deployment`
+
+
+ Response
+
+```json
+{
+ "status": 1,
+ "nodes": {
+ "agent": [
+ "traderomengnosis_abci_0"
+ ],
+ "tendermint": [
+ "traderomengnosis_tm_0"
+ ]
+ }
}
```
-* Get the list of available services
-```js
-GET /api/services -> Services
+
+
+---
+`POST /api/services/{service}/onchain/deploy`
+
+Deploy service on-chain
+
+
+ Request
+
+```json
```
-* Create a new service from template
-```js
-POST /api/services -> ServiceTemplate -> Services
+
+
+
+ Response
+
+```json
```
-* Update a service template
-```js
-PUT /api/services -> ServiceUpdateTemplate -> Services
+
+
+---
+`POST /api/services/{service}/onchain/stop`
+
+Stop service on-chain
+
+
+ Request
+
+```json
```
-* Delete services
-```js
-DELETE /api/services -> Array -> Array
+
+
+
+ Response
+
+```json
```
-* Get a service
-```js
-GET /api/services/{ServiceHash} -> Services
+
+
+---
+`POST /api/services/{service}/deployment/build`
+
+Build service locally
+
+
+ Request
+
+```json
```
-* Start a service
-```js
-POST /api/services/{ServiceHash}/deploy -> Deployment
+
+
+
+ Response
+
+```json
```
-* Stop a service
-```js
-POST /api/services/{ServiceHash}/stop -> Deployment
+
+
+---
+`POST /api/services/{service}/deployment/start`
+
+Start agent
+
+
+ Request
+
+```json
+```
+
+
+
+
+ Response
+
+```json
+```
+
+
+
+---
+`POST /api/services/{service}/deployment/stop`
+
+Stop agent
+
+```json
+```
+
+---
+`POST /api/services/{service}/deployment/delete`
+
+Delete local deployment
+
+
+ Request
+
+```json
```
-* Get deployment status
-```js
-POST /api/services/{ServiceHash}/status -> Deployment
+
+
+
+ Response
+
+```json
```
-* Update a service by hash
-```js
-PUT /api/services/{ServiceHash} -> ServiceUpdate -> Services
+
+
+
diff --git a/operate/cli.py b/operate/cli.py
index 626bf2112..1223a451e 100644
--- a/operate/cli.py
+++ b/operate/cli.py
@@ -19,58 +19,67 @@
"""Operate app CLI module."""
+import logging
import os
+import traceback
import typing as t
from pathlib import Path
+from aea.helpers.logging import setup_logger
from aea_ledger_ethereum.ethereum import EthereumCrypto
from clea import group, params, run
-from starlette.applications import Starlette
-from starlette.middleware import Middleware
-from starlette.middleware.cors import CORSMiddleware
-from starlette.routing import Route
+from fastapi import FastAPI, Request
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
from typing_extensions import Annotated
from uvicorn.main import run as uvicorn
+from operate import services
from operate.constants import KEY, KEYS, OPERATE, SERVICES
-from operate.http import Resource
-from operate.keys import Keys
-from operate.services.manage import Services
DEFAULT_HARDHAT_KEY = (
"0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
).encode()
+DEFAULT_MAX_RETRIES = 3
-class App(Resource):
- """App resource."""
+class OperateApp:
+ """Operate app."""
- def __init__(self, home: t.Optional[Path] = None) -> None:
+ def __init__(
+ self,
+ home: t.Optional[Path] = None,
+ logger: t.Optional[logging.Logger] = None,
+ ) -> None:
"""Initialize object."""
super().__init__()
- self._path = (home or (Path.home() / OPERATE)).resolve()
+ self._path = (home or (Path.cwd() / OPERATE)).resolve()
self._services = self._path / SERVICES
self._keys = self._path / KEYS
- self._key = self._path / KEY
+ self._master_key = self._path / KEY
+ self.setup()
- self.make()
-
- self.keys = Keys(path=self._keys)
- self.services = Services(
+ self.logger = logger or setup_logger(name="operate")
+ self.keys_manager = services.manage.KeysManager(
+ path=self._keys,
+ logger=self.logger,
+ )
+ self.service_manager = services.manage.ServiceManager(
path=self._services,
- keys=self.keys,
- key=self._key,
+ keys_manager=self.keys_manager,
+ master_key_path=self._master_key,
+ logger=self.logger,
)
- def make(self) -> None:
+ def setup(self) -> None:
"""Make the root directory."""
self._path.mkdir(exist_ok=True)
self._services.mkdir(exist_ok=True)
self._keys.mkdir(exist_ok=True)
- if not self._key.exists():
+ if not self._master_key.exists():
# TODO: Add support for multiple master keys
- self._key.write_bytes(
+ self._master_key.write_bytes(
DEFAULT_HARDHAT_KEY
if os.environ.get("DEV", "false") == "true"
else EthereumCrypto().private_key.encode()
@@ -83,12 +92,196 @@ def json(self) -> dict:
"name": "Operate HTTP server",
"version": "0.1.0.rc0",
"account": {
- "key": EthereumCrypto(self._key).address,
+ "key": EthereumCrypto(self._master_key).address,
},
"home": str(self._path),
}
+def create_app( # pylint: disable=too-many-locals, unused-argument
+ home: t.Optional[Path] = None,
+) -> FastAPI:
+ """Create FastAPI object."""
+
+ logger = setup_logger(name="operate")
+ operate = OperateApp(home=home, logger=logger)
+ app = FastAPI()
+
+ app.add_middleware(
+ CORSMiddleware,
+ allow_origins=["*"],
+ allow_methods=["GET", "POST", "PUT", "DELETE"],
+ )
+
+ def with_retries(f: t.Callable) -> t.Callable:
+ """Retries decorator."""
+
+ async def _call(request: Request) -> JSONResponse:
+ """Call the endpoint."""
+ logger.info(f"Calling `{f.__name__}` with retries enabled")
+ retries = 0
+ errors = []
+ while retries < DEFAULT_MAX_RETRIES:
+ try:
+ return await f(request)
+ except Exception as e: # pylint: disable=broad-except
+ errors.append(
+ {"error": str(e), "traceback": traceback.format_exc()}
+ )
+ logger.error(f"Error {e}\n{traceback.format_exc()}")
+ retries += 1
+ return JSONResponse(content={"errors": errors}, status_code=500)
+
+ return _call
+
+ @app.get("/api")
+ @with_retries
+ async def _get_api(request: Request) -> JSONResponse:
+ """Get API info."""
+ return JSONResponse(content=operate.json)
+
+ @app.get("/api/services")
+ @with_retries
+ async def _get_services(request: Request) -> JSONResponse:
+ """Get available services."""
+ return JSONResponse(content=operate.service_manager.json)
+
+ @app.post("/api/services")
+ @with_retries
+ async def _create_services(request: Request) -> JSONResponse:
+ """Create a service."""
+ template = await request.json()
+ service = operate.service_manager.create_or_load(
+ hash=template["hash"],
+ rpc=template["configuration"]["rpc"],
+ on_chain_user_params=services.manage.OnChainUserParams.from_json(
+ template["configuration"]
+ ),
+ )
+ if template.get("deploy", False):
+ operate.service_manager.deploy_service_onchain(hash=service.hash)
+ operate.service_manager.stake_service_on_chain(hash=service.hash)
+ service.deployment.build()
+ service.deployment.start()
+ return JSONResponse(
+ content=operate.service_manager.create_or_load(hash=service.hash).json
+ )
+
+ @app.put("/api/services")
+ @with_retries
+ async def _update_services(request: Request) -> JSONResponse:
+ """Update a service."""
+ template = await request.json()
+ service = operate.service_manager.update_service(
+ old_hash=template["old_service_hash"],
+ new_hash=template["new_service_hash"],
+ )
+ if template.get("deploy", False):
+ operate.service_manager.deploy_service_onchain(hash=service.hash)
+ operate.service_manager.stake_service_on_chain(hash=service.hash)
+ service.deployment.build()
+ service.deployment.start()
+ return JSONResponse(content=service.json)
+
+ @app.get("/api/services/{service}")
+ @with_retries
+ async def _get_service(request: Request) -> JSONResponse:
+ """Get a service."""
+ return JSONResponse(
+ content=operate.service_manager.create_or_load(
+ hash=request.path_params["service"],
+ ).json
+ )
+
+ @app.post("/api/services/{service}/onchain/deploy")
+ @with_retries
+ async def _deploy_service_onchain(request: Request) -> JSONResponse:
+ """Deploy a service on-chain."""
+ operate.service_manager.deploy_service_onchain(
+ hash=request.path_params["service"]
+ )
+ operate.service_manager.stake_service_on_chain(
+ hash=request.path_params["service"]
+ )
+ return JSONResponse(
+ content=operate.service_manager.create_or_load(
+ hash=request.path_params["service"]
+ ).json
+ )
+
+ @app.post("/api/services/{service}/onchain/stop")
+ @with_retries
+ async def _stop_service_onchain(request: Request) -> JSONResponse:
+ """Stop a service on-chain."""
+ operate.service_manager.terminate_service_on_chain(
+ hash=request.path_params["service"]
+ )
+ operate.service_manager.unbond_service_on_chain(
+ hash=request.path_params["service"]
+ )
+ operate.service_manager.unstake_service_on_chain(
+ hash=request.path_params["service"]
+ )
+ return JSONResponse(
+ content=operate.service_manager.create_or_load(
+ hash=request.path_params["service"]
+ ).json
+ )
+
+ @app.get("/api/services/{service}/deployment")
+ @with_retries
+ async def _get_service_deployment(request: Request) -> JSONResponse:
+ """Get a service deployment."""
+ return JSONResponse(
+ content=operate.service_manager.create_or_load(
+ request.path_params["service"],
+ ).deployment.json
+ )
+
+ @app.post("/api/services/{service}/deployment/build")
+ @with_retries
+ async def _build_service_locally(request: Request) -> JSONResponse:
+ """Build a service deployment locally."""
+ deployment = operate.service_manager.create_or_load(
+ request.path_params["service"],
+ ).deployment
+ deployment.build()
+ return JSONResponse(content=deployment.json)
+
+ @app.post("/api/services/{service}/deployment/start")
+ @with_retries
+ async def _start_service_locally(request: Request) -> JSONResponse:
+ """Start a service locally."""
+ deployment = operate.service_manager.create_or_load(
+ request.path_params["service"],
+ ).deployment
+ deployment.build()
+ deployment.start()
+ return JSONResponse(content=deployment.json)
+
+ @app.post("/api/services/{service}/deployment/stop")
+ @with_retries
+ async def _stop_service_locally(request: Request) -> JSONResponse:
+ """Stop a service locally."""
+ deployment = operate.service_manager.create_or_load(
+ request.path_params["service"],
+ ).deployment
+ deployment.stop()
+ return JSONResponse(content=deployment.json)
+
+ @app.post("/api/services/{service}/deployment/delete")
+ @with_retries
+ async def _delete_service_locally(request: Request) -> JSONResponse:
+ """Delete a local service deployment."""
+ deployment = operate.service_manager.create_or_load(
+ request.path_params["service"],
+ ).deployment
+ deployment.delete()
+ return JSONResponse(content=deployment.json)
+
+ return app
+
+
@group(name="operate")
def _operate() -> None:
"""Operate - deploy autonomous services."""
@@ -103,24 +296,8 @@ def _daemon(
] = None,
) -> None:
"""Launch operate daemon."""
- app = App(home=home)
uvicorn(
- app=Starlette(
- debug=True,
- routes=[
- Route("/api", app),
- Route("/api/services", app.services),
- Route("/api/services/{service}", app.services),
- Route("/api/services/{service}/{action}", app.services),
- ],
- middleware=[
- Middleware(
- CORSMiddleware,
- allow_origins=["*"],
- allow_methods=("GET", "POST", "PUT", "DELETE"),
- )
- ],
- ),
+ app=create_app(home=home),
host=host,
port=port,
)
diff --git a/operate/keys.py b/operate/keys.py
index e1855918e..146210d3f 100644
--- a/operate/keys.py
+++ b/operate/keys.py
@@ -18,40 +18,83 @@
# ------------------------------------------------------------------------------
"""Keys manager."""
+
import json
+import logging
import os
+import typing as t
+from dataclasses import dataclass
from pathlib import Path
+from aea.helpers.logging import setup_logger
from aea_ledger_ethereum.ethereum import EthereumCrypto
-from operate.types import KeyType
+from operate.resource import LocalResource
+from operate.types import LedgerType
+
+
+@dataclass
+class Key(LocalResource):
+ """Key resource."""
+
+ ledger: LedgerType
+ address: str
+ private_key: str
+
+ @classmethod
+ def load(cls, path: Path) -> "Key":
+ """Load a key."""
+ return super().load(path) # type: ignore
-class Keys:
+Keys = t.List[Key]
+
+
+class KeysManager:
"""Keys manager."""
- def __init__(self, path: Path) -> None:
- """Initialize object."""
- self._path = path
+ def __init__(
+ self,
+ path: Path,
+ logger: t.Optional[logging.Logger] = None,
+ ) -> None:
+ """
+ Initialize keys manager
+
+ :param path: Path to keys storage.
+ :param logger: logging.Logger object.
+ """
+ self.path = path
+ self.logger = logger or setup_logger(name="operate.keys")
- def get(self, key: str) -> KeyType:
+ def setup(self) -> None:
+ """Setup service manager."""
+ self.path.mkdir(exist_ok=True)
+
+ def get(self, key: str) -> Key:
"""Get key object."""
- return json.loads((self._path / key).read_text(encoding="utf-8"))
+ return Key.from_json( # type: ignore
+ obj=json.loads(
+ (self.path / key).read_text(
+ encoding="utf-8",
+ )
+ )
+ )
def create(self) -> str:
"""Creates new key."""
crypto = EthereumCrypto()
- path = self._path / crypto.address
+ path = self.path / crypto.address
if path.is_file():
return crypto.address
path.write_text(
json.dumps(
- {
- "address": crypto.address,
- "private_key": crypto.private_key,
- "ledger": "ethereum",
- },
+ Key(
+ ledger=LedgerType.ETHEREUM,
+ address=crypto.address,
+ private_key=crypto.private_key,
+ ).json,
indent=4,
),
encoding="utf-8",
@@ -60,4 +103,4 @@ def create(self) -> str:
def delete(self, key: str) -> None:
"""Delete key."""
- os.remove(self._path / key)
+ os.remove(self.path / key)
diff --git a/operate/resource.py b/operate/resource.py
new file mode 100644
index 000000000..269521849
--- /dev/null
+++ b/operate/resource.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+# ------------------------------------------------------------------------------
+#
+# Copyright 2024 Valory AG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# ------------------------------------------------------------------------------
+
+"""Local resource representation."""
+
+import enum
+import json
+import typing as t
+from dataclasses import asdict, is_dataclass
+from pathlib import Path
+
+
+# pylint: disable=too-many-return-statements,no-member
+
+
+def serialize(obj: t.Any) -> t.Any:
+ """Serialize object."""
+ if is_dataclass(obj):
+ return asdict(obj)
+ if isinstance(obj, Path):
+ return str(obj)
+ if isinstance(obj, dict):
+ return {key: serialize(obj=value) for key, value in obj.items()}
+ if isinstance(obj, list):
+ return [serialize(obj=value) for value in obj]
+ if isinstance(obj, enum.Enum):
+ return obj.value
+ return obj
+
+
+def deserialize(obj: t.Any, otype: t.Any) -> t.Any:
+ """Deserialize a json object."""
+ base = getattr(otype, "__class__") # noqa: B009
+ if base.__name__ == "_GenericAlias": # type: ignore
+ args = otype.__args__ # type: ignore
+ if len(args) == 1:
+ (atype,) = args
+ return [deserialize(arg, atype) for arg in obj]
+ if len(args) == 2:
+ (ktype, vtype) = args
+ return {
+ deserialize(key, ktype): deserialize(val, vtype)
+ for key, val in obj.items()
+ }
+ return obj
+ if base is enum.EnumMeta:
+ return otype(obj)
+ if otype is Path:
+ return Path(obj)
+ if is_dataclass(otype):
+ return otype.from_json(obj)
+ return obj
+
+
+class LocalResource:
+ """Initialize local resource."""
+
+ _file: t.Optional[str] = None
+
+ def __init__(self, path: t.Optional[Path] = None) -> None:
+ """Initialize local resource."""
+ self.path = path
+
+ @property
+ def json(self) -> t.Dict:
+ """To dictionary object."""
+ obj = {}
+ for pname, _ in self.__annotations__.items():
+ if pname.startswith("_"):
+ continue
+ obj[pname] = serialize(self.__dict__[pname])
+ return obj
+
+ @classmethod
+ def from_json(cls, obj: t.Dict) -> "LocalResource":
+ """Load LocalResource from json."""
+ kwargs = {}
+ for pname, ptype in cls.__annotations__.items():
+ if pname.startswith("_"):
+ continue
+ kwargs[pname] = deserialize(obj=obj[pname], otype=ptype)
+ return cls(**kwargs)
+
+ @classmethod
+ def load(cls, path: Path) -> "LocalResource":
+ """Load local resource."""
+ file = (
+ path / cls._file
+ if cls._file is not None and path.name != cls._file
+ else path
+ )
+ data = json.loads(file.read_text(encoding="utf-8"))
+ return cls.from_json(obj={**data, "path": path})
+
+ def store(self) -> None:
+ """Store local resource."""
+ if self.path is None:
+ raise RuntimeError(f"Cannot save {self}; Path value not provided.")
+
+ path = self.path
+ if self._file is not None:
+ path = path / self._file
+
+ path.write_text(
+ json.dumps(
+ self.json,
+ indent=2,
+ ),
+ encoding="utf-8",
+ )
diff --git a/operate/services/__init__.py b/operate/services/__init__.py
index b7bf72ac3..d8ea08f98 100644
--- a/operate/services/__init__.py
+++ b/operate/services/__init__.py
@@ -18,3 +18,8 @@
# ------------------------------------------------------------------------------
"""Services endpoint."""
+
+from . import manage
+
+
+__all__ = ("manage",)
diff --git a/operate/services/manage.py b/operate/services/manage.py
index 22c0a70da..c613f0980 100644
--- a/operate/services/manage.py
+++ b/operate/services/manage.py
@@ -20,39 +20,27 @@
"""Service manager."""
import logging
-import os
-import shutil
import typing as t
from pathlib import Path
from aea.helpers.base import IPFSHash
+from aea.helpers.logging import setup_logger
from autonomy.chain.base import registry_contracts
-from autonomy.deploy.constants import (
- AGENT_KEYS_DIR,
- BENCHMARKS_DIR,
- LOG_DIR,
- PERSISTENT_DATA_DIR,
- TM_STATE_DIR,
- VENVS_DIR,
-)
-from starlette.types import Receive, Scope, Send
-from typing_extensions import TypedDict
-from operate.http import Resource
-from operate.http.exceptions import BadRequest
-from operate.keys import Keys
+from operate.keys import Key, KeysManager
from operate.ledger.profiles import CONTRACTS, OLAS, STAKING
from operate.services.protocol import OnChainManager
-from operate.services.service import Service
-from operate.types import (
- ChainData,
- ConfigurationTemplate,
- ServiceTemplate,
- ServiceType,
- ServicesType,
+from operate.services.service import (
+ Deployment,
+ OnChainData,
+ OnChainState,
+ OnChainUserParams,
+ Service,
)
+# pylint: disable=redefined-builtin
+
OPERATE = ".operate"
CONFIG = "config.json"
SERVICES = "services"
@@ -65,81 +53,36 @@
SERVICE_YAML = "service.yaml"
-def build_dirs(build_dir: Path) -> None:
- """Build necessary directories."""
-
- for dir_path in [
- (PERSISTENT_DATA_DIR,),
- (PERSISTENT_DATA_DIR, LOG_DIR),
- (PERSISTENT_DATA_DIR, TM_STATE_DIR),
- (PERSISTENT_DATA_DIR, BENCHMARKS_DIR),
- (PERSISTENT_DATA_DIR, VENVS_DIR),
- (AGENT_KEYS_DIR,),
- ]:
- path = Path(build_dir, *dir_path)
- path.mkdir()
- try:
- os.chown(path, 1000, 1000)
- except (PermissionError, AttributeError):
- continue
-
-
-class PostServices(ServiceTemplate):
- """Create payload."""
-
-
-class PutServices(TypedDict):
- """Create payload."""
-
- old: str
- new: ServiceTemplate
-
-
-class DeleteServicesPayload(TypedDict):
- """Create payload."""
-
- hashes: t.List[str]
-
-
-class DeleteServicesResponse(TypedDict):
- """Create payload."""
-
- hashes: t.List[str]
+class ServiceManager:
+ """Service manager."""
-
-class Services(
- Resource[
- ServicesType,
- PostServices,
- ServiceType,
- PutServices,
- ServiceType,
- DeleteServicesPayload,
- ServicesType,
- ]
-):
- """Services resource."""
-
- def __init__(self, path: Path, keys: Keys, key: Path) -> None:
- """Initialze object."""
- super().__init__()
- self.path = path
- self.keys = keys
- self.key = key
-
- async def access(
+ def __init__(
self,
- params: t.Dict,
- scope: Scope,
- receive: Receive,
- send: Send,
+ path: Path,
+ keys_manager: KeysManager,
+ master_key_path: Path,
+ logger: t.Optional[logging.Logger] = None,
) -> None:
- """Access service resource."""
- resource = Service.load(self.path / params.pop("service"))
- await resource(scope=scope, receive=receive, send=send)
+ """
+ Initialize service manager
+
+ :param path: Path to service storage.
+ :param keys_manager: Keys manager.
+ :param master_key_path: Path to master key.
+ :param logger: logging.Logger object.
+ """
+ self.path = path
+ self.keys_manager = keys_manager
+ self.master_key_path = master_key_path
+ self.logger = logger or setup_logger(name="operate.manager")
+
+ def setup(self) -> None:
+ """Setup service manager."""
+ self.path.mkdir(exist_ok=True)
+ self.keys_manager.setup()
@property
- def json(self) -> ServicesType:
+ def json(self) -> t.List[t.Dict]:
"""Returns the list of available services."""
data = []
for path in self.path.iterdir():
@@ -147,201 +90,336 @@ def json(self) -> ServicesType:
data.append(service.json)
return data
- def _stake(self) -> None:
- """Stake a service."""
+ def get_on_chain_manager(self, service: Service) -> OnChainManager:
+ """Get OnChainManager instance."""
+ return OnChainManager(
+ rpc=service.ledger_config.rpc,
+ key=self.master_key_path,
+ contracts=CONTRACTS[service.ledger_config.chain],
+ )
- def _create(
+ def create_or_load(
self,
- phash: str,
- configuration: ConfigurationTemplate,
- instances: t.Optional[t.List[str]] = None,
- update_token: t.Optional[int] = None,
- reuse_multisig: bool = False,
+ hash: str,
+ rpc: t.Optional[str] = None,
+ on_chain_user_params: t.Optional[OnChainUserParams] = None,
+ keys: t.Optional[t.List[Key]] = None,
) -> Service:
- """Create a new service."""
- if (self.path / phash).exists(): # For testing only
- shutil.rmtree(self.path / phash)
-
- logging.info(f"Fetching service {phash}")
- service = Service.new(
- path=self.path,
- phash=phash,
- keys=[],
- chain_data=ChainData(),
- ledger={},
- )
+ """
+ Create or load a service
+
+ :param hash: Service hash
+ :param rpc: RPC string
+ """
+ path = self.path / hash
+ if path.exists():
+ return Service.load(path=path)
+
+ if rpc is None:
+ raise ValueError("RPC cannot be None when creating a new service")
+
+ if on_chain_user_params is None:
+ raise ValueError(
+ "On-chain user parameters cannot be None when creating a new service"
+ )
- ledger = service.helper.ledger_config()
- instances = instances or [
- self.keys.create() for _ in range(service.helper.config.number_of_agents)
- ]
- ocm = OnChainManager(
- rpc=configuration["rpc"],
- key=self.key,
- contracts=CONTRACTS[ledger["chain"]],
+ return Service.new(
+ hash=hash,
+ keys=keys or [],
+ rpc=rpc,
+ storage=self.path,
+ on_chain_user_params=on_chain_user_params,
)
- if configuration["use_staking"] and not ocm.staking_slots_available(
- staking_contract=STAKING[ledger["chain"]]
+ def deploy_service_onchain(self, hash: str) -> None:
+ """
+        Deploy a service on-chain
+
+ :param hash: Service hash
+ """
+ self.logger.info("Loading service")
+ service = self.create_or_load(hash=hash)
+ user_params = service.chain_data.user_params
+ update = service.chain_data.token != -1
+ keys = service.keys or [
+ self.keys_manager.get(self.keys_manager.create())
+ for _ in range(service.helper.config.number_of_agents)
+ ]
+ instances = [key.address for key in keys]
+ ocm = self.get_on_chain_manager(service=service)
+ if user_params.use_staking and not ocm.staking_slots_available(
+ staking_contract=STAKING[service.ledger_config.chain]
):
raise ValueError("No staking slots available")
- if configuration["use_staking"]:
+ if user_params.use_staking:
+ self.logger.info("Checking staking compatibility")
required_olas = (
- configuration["olas_cost_of_bond"]
- + configuration["olas_required_to_stake"]
+ user_params.olas_cost_of_bond + user_params.olas_required_to_stake
)
balance = (
registry_contracts.erc20.get_instance(
ledger_api=ocm.ledger_api,
- contract_address=OLAS[ledger["chain"]],
+ contract_address=OLAS[service.ledger_config.chain],
)
.functions.balanceOf(ocm.crypto.address)
.call()
)
if balance < required_olas:
- raise BadRequest(
+ raise ValueError(
"You don't have enough olas to stake, "
f"required olas: {required_olas}; your balance {balance}"
)
- # Update to user provided RPC
- ledger["rpc"] = configuration["rpc"]
-
- logging.info(f"Minting service {phash}")
- service_id = t.cast(
- int,
- ocm.mint(
- package_path=service.service_path,
- agent_id=configuration["agent_id"],
- number_of_slots=service.helper.config.number_of_agents,
- cost_of_bond=(
- configuration["olas_cost_of_bond"]
- if configuration["use_staking"]
- else configuration["cost_of_bond"]
+ if service.chain_data.on_chain_state == OnChainState.NOTMINTED:
+ self.logger.info("Minting service")
+ service.chain_data.token = t.cast(
+ int,
+ ocm.mint(
+ package_path=service.service_path,
+ agent_id=user_params.agent_id,
+ number_of_slots=service.helper.config.number_of_agents,
+ cost_of_bond=(
+ user_params.olas_cost_of_bond
+ if user_params.use_staking
+ else user_params.cost_of_bond
+ ),
+ threshold=user_params.threshold,
+ nft=IPFSHash(user_params.nft),
+ update_token=service.chain_data.token if update else None,
+ token=(
+ OLAS[service.ledger_config.chain]
+ if user_params.use_staking
+ else None
+ ),
+ ).get("token"),
+ )
+ service.chain_data.on_chain_state = OnChainState.MINTED
+ service.store()
+ else:
+ self.logger.info("Service already minted")
+
+ if service.chain_data.on_chain_state == OnChainState.MINTED:
+ self.logger.info("Activating service")
+ ocm.activate(
+ service_id=service.chain_data.token,
+ token=(
+ OLAS[service.ledger_config.chain]
+ if user_params.use_staking
+ else None
),
- threshold=configuration["threshold"],
- nft=IPFSHash(configuration["nft"]),
- update_token=update_token,
- token=OLAS[ledger["chain"]] if configuration["use_staking"] else None,
- ).get("token"),
+ )
+ service.chain_data.on_chain_state = OnChainState.ACTIVATED
+ service.store()
+ else:
+ self.logger.info("Service already activated")
+
+ if service.chain_data.on_chain_state == OnChainState.ACTIVATED:
+ self.logger.info("Registering service")
+ ocm.register(
+ service_id=service.chain_data.token,
+ instances=instances,
+ agents=[user_params.agent_id for _ in instances],
+ )
+ service.chain_data.on_chain_state = OnChainState.REGISTERED
+ service.keys = keys
+ service.store()
+ else:
+ self.logger.info("Service already registered")
+
+ if service.chain_data.on_chain_state == OnChainState.REGISTERED:
+ self.logger.info("Deploying service")
+ ocm.deploy(
+ service_id=service.chain_data.token,
+ reuse_multisig=update,
+ token=(
+ OLAS[service.ledger_config.chain]
+ if user_params.use_staking
+ else None
+ ),
+ )
+ service.chain_data.on_chain_state = OnChainState.DEPLOYED
+ service.store()
+ else:
+ self.logger.info("Service already deployed")
+
+ info = ocm.info(token_id=service.chain_data.token)
+ service.keys = keys
+ service.chain_data = OnChainData(
+ token=service.chain_data.token,
+ instances=info["instances"],
+ multisig=info["multisig"],
+ staked=False,
+ on_chain_state=service.chain_data.on_chain_state,
+ user_params=service.chain_data.user_params,
)
+ service.store()
- logging.info(f"Activating service {phash}")
- ocm.activate(
- service_id=service_id,
- token=OLAS[ledger["chain"]] if configuration["use_staking"] else None,
- )
- ocm.register(
- service_id=service_id,
- instances=instances,
- agents=[configuration["agent_id"] for _ in instances],
- token=OLAS[ledger["chain"]] if configuration["use_staking"] else None,
- )
+ def terminate_service_on_chain(self, hash: str) -> None:
+ """
+ Terminate service on-chain
- logging.info(f"Deploying service {phash}")
- ocm.deploy(
- service_id=service_id,
- reuse_multisig=reuse_multisig,
- token=OLAS[ledger["chain"]] if configuration["use_staking"] else None,
- )
+ :param hash: Service hash
+ """
+ service = self.create_or_load(hash=hash)
+ if service.chain_data.on_chain_state != OnChainState.DEPLOYED:
+ self.logger.info("Cannot terminate service")
+ return
- logging.info(f"Updating service {phash}")
- info = ocm.info(token_id=service_id)
- service.ledger = ledger
- service.keys = [self.keys.get(key=key) for key in instances]
- service.chain_data = ChainData(
- {
- "token": service_id,
- "instances": info["instances"],
- "multisig": info["multisig"],
- "staked": False,
- }
+ self.logger.info("Terminating service")
+ ocm = self.get_on_chain_manager(service=service)
+ ocm.terminate(
+ service_id=service.chain_data.token,
+ token=(
+ OLAS[service.ledger_config.chain]
+ if service.chain_data.user_params.use_staking
+ else None
+ ),
)
-
- if configuration["use_staking"]:
- ocm.stake(
- service_id=service_id,
- service_registry=CONTRACTS[ledger["chain"]]["service_registry"],
- staking_contract=STAKING[ledger["chain"]],
- )
- service.chain_data["staked"] = True
+ service.chain_data.on_chain_state = OnChainState.TERMINATED
service.store()
- logging.info(f"Building deployment for service {phash}")
- deployment = service.deployment()
- deployment.create({})
- deployment.store()
+ def unbond_service_on_chain(self, hash: str) -> None:
+ """
+ Unbond service on-chain
- return service
+ :param hash: Service hash
+ """
+ service = self.create_or_load(hash=hash)
+ if service.chain_data.on_chain_state != OnChainState.TERMINATED:
+ self.logger.info("Cannot unbond service")
+ return
- def create(self, data: PostServices) -> ServiceType:
- """Create a service."""
- service = self._create(
- phash=data["hash"],
- configuration=data["configuration"],
- )
- return service.json
-
- def update(self, data: PutServices) -> ServiceType:
- """Update service using a template."""
- # NOTE: This method contains a lot of repetative code
- rpc = data["new"]["configuration"]["rpc"]
- phash = data["new"]["hash"]
- if (self.path / phash).exists(): # For testing only
- shutil.rmtree(self.path / phash)
-
- # Load old service
- old = Service.load(path=self.path / data["old"])
- instances = old.chain_data["instances"]
- ocm = OnChainManager(
- rpc=rpc,
- key=self.key,
- contracts=CONTRACTS[old.ledger["chain"]],
+ self.logger.info("Unbonding service")
+ ocm = self.get_on_chain_manager(service=service)
+ ocm.unbond(
+ service_id=service.chain_data.token,
+ token=(
+ OLAS[service.ledger_config.chain]
+ if service.chain_data.user_params.use_staking
+ else None
+ ),
)
+ service.chain_data.on_chain_state = OnChainState.UNBONDED
+ service.store()
- if old.chain_data["staked"]:
- ocm.unstake(
- service_id=old.chain_data["token"],
- staking_contract=STAKING[old.ledger["chain"]],
- )
- old.chain_data["staked"] = False
- old.store()
-
- # Terminate old service
- ocm.terminate(
- service_id=old.chain_data["token"],
- token=OLAS[old.ledger["chain"]] if old.chain_data["staked"] else None,
+ def stake_service_on_chain(self, hash: str) -> None:
+ """
+ Stake service on-chain
+
+ :param hash: Service hash
+ """
+ service = self.create_or_load(hash=hash)
+ if not service.chain_data.user_params.use_staking:
+ self.logger.info("Cannot stake service, `use_staking` is set to false")
+ return
+
+ if service.chain_data.staked:
+ self.logger.info("Cannot stake service, it's already staked")
+ return
+
+ if service.chain_data.on_chain_state != OnChainState.DEPLOYED:
+ self.logger.info("Cannot stake service, it's not in deployed state")
+ return
+
+ ocm = self.get_on_chain_manager(service=service)
+ ocm.stake(
+ service_id=service.chain_data.token,
+ service_registry=CONTRACTS[service.ledger_config.chain]["service_registry"],
+ staking_contract=STAKING[service.ledger_config.chain],
)
+ service.chain_data.staked = True
+ service.store()
- # Unbond old service
- ocm.unbond(
- service_id=old.chain_data["token"],
- token=OLAS[old.ledger["chain"]] if old.chain_data["staked"] else None,
+ def unstake_service_on_chain(self, hash: str) -> None:
+ """
+        Unstake service on-chain
+
+ :param hash: Service hash
+ """
+ service = self.create_or_load(hash=hash)
+ if not service.chain_data.user_params.use_staking:
+ self.logger.info("Cannot unstake service, `use_staking` is set to false")
+ return
+
+ if not service.chain_data.staked:
+ self.logger.info("Cannot unstake service, it's not staked")
+ return
+
+ ocm = self.get_on_chain_manager(service=service)
+ ocm.unstake(
+ service_id=service.chain_data.token,
+ staking_contract=STAKING[service.ledger_config.chain],
)
+ service.chain_data.staked = False
+ service.store()
- # Swap owners on the old safe
- owner, *_ = old.chain_data["instances"]
- owner_key = str(self.keys.get(owner).get("private_key"))
+ def deploy_service_locally(self, hash: str, force: bool = False) -> Deployment:
+ """
+ Deploy service locally
+
+ :param hash: Service hash
+ :param force: Remove previous deployment and start a new one.
+ """
+ deployment = self.create_or_load(hash=hash).deployment
+ deployment.build(force=force)
+ deployment.start()
+ return deployment
+
+ def stop_service_locally(self, hash: str, delete: bool) -> Deployment:
+ """
+ Stop service locally
+
+ :param hash: Service hash
+ :param delete: Delete local deployment.
+ """
+ deployment = self.create_or_load(hash=hash).deployment
+ deployment.stop()
+ if delete:
+ deployment.delete()
+ return deployment
+
+ def update_service(
+ self,
+ old_hash: str,
+ new_hash: str,
+ rpc: t.Optional[str] = None,
+ on_chain_user_params: t.Optional[OnChainUserParams] = None,
+ ) -> Service:
+ """Update a service."""
+ old_service = self.create_or_load(
+ hash=old_hash,
+ )
+ self.unstake_service_on_chain(
+ hash=old_hash,
+ )
+ self.terminate_service_on_chain(
+ hash=old_hash,
+ )
+ self.unbond_service_on_chain(
+ hash=old_hash,
+ )
+ ocm = self.get_on_chain_manager(service=old_service)
+ owner, *_ = old_service.chain_data.instances
ocm.swap(
- service_id=old.chain_data["token"],
- multisig=old.chain_data["multisig"],
- owner_key=owner_key,
+ service_id=old_service.chain_data.token,
+ multisig=old_service.chain_data.multisig,
+ owner_key=str(self.keys_manager.get(key=owner).private_key),
)
- service = self._create(
- phash=phash,
- configuration=data["new"]["configuration"],
- instances=instances,
- reuse_multisig=True,
- update_token=old.chain_data["token"],
+
+ new_service = self.create_or_load(
+ hash=new_hash,
+ rpc=rpc or old_service.ledger_config.rpc,
+ on_chain_user_params=on_chain_user_params
+ or old_service.chain_data.user_params,
)
- old.delete({})
- return service.json
-
- def delete(self, data: DeleteServicesPayload) -> ServicesType:
- """Delete services."""
- for shash in data["hashes"]:
- service = Service.load(path=self.path / shash)
- service.delete(data={})
- return self.json
+ new_service.keys = old_service.keys
+ new_service.chain_data = old_service.chain_data
+ new_service.ledger_config = old_service.ledger_config
+ new_service.chain_data.on_chain_state = OnChainState.NOTMINTED
+ new_service.store()
+
+ self.deploy_service_onchain(hash=new_service.hash)
+ old_service.delete()
+ return new_service
diff --git a/operate/services/service.py b/operate/services/service.py
index c884f045a..090a90c7b 100644
--- a/operate/services/service.py
+++ b/operate/services/service.py
@@ -24,6 +24,7 @@
import shutil
import typing as t
from copy import deepcopy
+from dataclasses import dataclass
from pathlib import Path
from aea.configurations.data_types import PackageType
@@ -41,36 +42,44 @@
VENVS_DIR,
)
from autonomy.deploy.generators.docker_compose.base import DockerComposeGenerator
-from starlette.types import Receive, Scope, Send
-from typing_extensions import TypedDict
from operate.constants import (
- CONFIG,
DEPLOYMENT,
DEPLOYMENT_JSON,
DOCKER_COMPOSE_YAML,
KEYS_JSON,
)
-from operate.http import Resource
-from operate.http.exceptions import NotAllowed, ResourceAlreadyExists
+from operate.http.exceptions import NotAllowed
+from operate.keys import Keys
+from operate.resource import LocalResource
from operate.types import (
- Action,
- ChainData,
ChainType,
DeployedNodes,
DeploymentConfig,
- DeploymentType,
- KeysType,
+ DeploymentStatus,
LedgerConfig,
LedgerType,
- ServiceType,
- Status,
+ OnChainData,
+ OnChainState,
+ OnChainUserParams,
)
-def build_dirs(build_dir: Path) -> None:
- """Build necessary directories."""
+# pylint: disable=no-member,redefined-builtin,too-many-instance-attributes
+
+_ACTIONS = {
+ "status": 0,
+ "build": 1,
+ "deploy": 2,
+ "stop": 3,
+}
+
+DUMMY_MULTISIG = "0xm"
+
+def mkdirs(build_dir: Path) -> None:
+ """Build necessary directories."""
+ build_dir.mkdir(exist_ok=True)
for dir_path in [
(PERSISTENT_DATA_DIR,),
(PERSISTENT_DATA_DIR, LOG_DIR),
@@ -87,24 +96,6 @@ def build_dirs(build_dir: Path) -> None:
continue
-class DeleteServicePayload(TypedDict):
- """Delete payload."""
-
-
-class DeleteServiceResponse(TypedDict):
- """Delete response."""
-
-
-class GetDeployment(TypedDict):
- """Create deployment payload."""
-
-
-class StopDeployment(TypedDict):
- """Delete deployment payload."""
-
- delete: bool
-
-
# TODO: Backport to autonomy
class ServiceBuilder(BaseServiceBuilder):
"""Service builder patch."""
@@ -146,53 +137,85 @@ def try_update_ledger_params(self, chain: str, address: str) -> None:
self.service.overrides = service_overrides
-class Deployment(
- Resource[
- DeploymentType,
- t.Dict,
- DeploymentType,
- t.Dict,
- DeploymentType,
- StopDeployment,
- DeploymentType,
- ]
-):
- """Deployment class."""
-
- action_to_method = {
- Action.STATUS: "GET",
- Action.BUILD: "POST",
- Action.DEPLOY: "PUT",
- Action.STOP: "DELETE",
- }
-
- def __init__(self, status: Status, nodes: DeployedNodes, path: Path) -> None:
+class ServiceHelper:
+ """Service config helper."""
+
+ def __init__(self, path: Path) -> None:
"""Initialize object."""
- super().__init__()
- self.status = status
- self.nodes = nodes
self.path = path
+ self.config = load_service_config(service_path=path)
- @property
- def json(self) -> DeploymentType:
- """Return deployment status."""
- return {
- "status": self.status,
- "nodes": self.nodes,
- }
-
- def create(self, data: t.Dict) -> DeploymentType:
- """Create deployment."""
+ def ledger_config(self) -> "LedgerConfig":
+ """Get ledger config."""
+ # TODO: Multiledger/Multiagent support
+ for override in self.config.overrides:
+ if (
+ override["type"] == "connection"
+ and "valory/ledger" in override["public_id"]
+ ):
+ (_, config), *_ = override["config"]["ledger_apis"].items()
+ return LedgerConfig(
+ rpc=config["address"],
+ chain=ChainType.from_id(cid=config["chain_id"]),
+ type=LedgerType.ETHEREUM,
+ )
+ raise ValueError("No ledger config found.")
+
+ def deployment_config(self) -> DeploymentConfig:
+ """Returns deployment config."""
+ return DeploymentConfig(self.config.json.get("deployment", {})) # type: ignore
+
+
+@dataclass
+class Deployment(LocalResource):
+ """Deployment resource for a service."""
+
+ status: DeploymentStatus
+ nodes: DeployedNodes
+ path: Path
+
+ _file = "deployment.json"
+
+ @staticmethod
+ def new(path: Path) -> "Deployment":
+ """
+ Create a new deployment
+
+ :param path: Path to service
+ """
+ deployment = Deployment(
+ status=DeploymentStatus.CREATED,
+ nodes=DeployedNodes(agent=[], tendermint=[]),
+ path=path,
+ )
+ deployment.store()
+ return deployment
+
+ @classmethod
+ def load(cls, path: Path) -> "Deployment":
+ """Load a service"""
+ return super().load(path) # type: ignore
+
+ def build(self, force: bool = True) -> None:
+ """
+ Build a deployment
+
+ :param force: Remove existing deployment and build a new one
+ :return: Deployment object
+ """
build = self.path / DEPLOYMENT
- if build.exists():
- raise ResourceAlreadyExists("Deployment already exists.")
- build.mkdir()
- build_dirs(build_dir=build)
+ if build.exists() and not force:
+ return
+ if build.exists() and force:
+ shutil.rmtree(build)
+ mkdirs(build_dir=build)
service = Service.load(path=self.path)
keys_file = self.path / KEYS_JSON
- keys_file.write_text(json.dumps(service.keys, indent=4), encoding="utf-8")
-
+ keys_file.write_text(
+ json.dumps([key.json for key in service.keys], indent=4),
+ encoding="utf-8",
+ )
try:
builder = ServiceBuilder.from_dir(
path=service.service_path,
@@ -202,21 +225,21 @@ def create(self, data: t.Dict) -> DeploymentType:
builder.deplopyment_type = DockerComposeGenerator.deployment_type
builder.try_update_abci_connection_params()
builder.try_update_runtime_params(
- multisig_address=service.chain_data.get("multisig"),
- agent_instances=service.chain_data.get("instances"),
+ multisig_address=service.chain_data.multisig,
+ agent_instances=service.chain_data.instances,
consensus_threshold=None,
)
# TODO: Support for multiledger
builder.try_update_ledger_params(
- chain=LedgerType(service.ledger["type"]).name.lower(),
- address=service.ledger["rpc"],
+ chain=LedgerType(service.ledger_config.type).name.lower(),
+ address=service.ledger_config.rpc,
)
# build deployment
(
DockerComposeGenerator(
service_builder=builder,
- build_dir=build,
+ build_dir=build.resolve(),
use_tm_testnet_setup=True,
)
.generate()
@@ -224,19 +247,21 @@ def create(self, data: t.Dict) -> DeploymentType:
.write_config()
.populate_private_keys()
)
- except Exception:
+ except Exception as e:
shutil.rmtree(build)
- raise
+ raise e
- compose = build / DOCKER_COMPOSE_YAML
- with compose.open("r", encoding="utf-8") as stream:
+ with (build / DOCKER_COMPOSE_YAML).open("r", encoding="utf-8") as stream:
deployment = yaml_load(stream=stream)
- self.nodes["agent"] = [
- service for service in deployment["services"] if "_abci_" in service
- ]
- self.nodes["tendermint"] = [
- service for service in deployment["services"] if "_tm_" in service
- ]
+
+ self.nodes = DeployedNodes(
+ agent=[
+ service for service in deployment["services"] if "_abci_" in service
+ ],
+ tendermint=[
+ service for service in deployment["services"] if "_tm_" in service
+ ],
+ )
_volumes = []
for volume, mount in (
@@ -244,139 +269,77 @@ def create(self, data: t.Dict) -> DeploymentType:
):
(build / volume).mkdir(exist_ok=True)
_volumes.append(f"./{volume}:{mount}:Z")
+
for node in deployment["services"]:
if "abci" in node:
deployment["services"][node]["volumes"].extend(_volumes)
- with compose.open("w", encoding="utf-8") as stream:
+
+ with (build / DOCKER_COMPOSE_YAML).open("w", encoding="utf-8") as stream:
yaml_dump(data=deployment, stream=stream)
- self.status = Status.BUILT
+ self.status = DeploymentStatus.BUILT
self.store()
- return self.json
- def update(self, data: t.Dict) -> DeploymentType:
+ def start(self) -> None:
"""Start the service"""
- if self.status != Status.BUILT:
+ if self.status != DeploymentStatus.BUILT:
raise NotAllowed(
- f"The deployment is in {self.status}; It needs to be in {Status.BUILT} status"
+ f"The deployment is in {self.status}; It needs to be in {DeploymentStatus.BUILT} status"
)
- self.status = Status.DEPLOYING
+ self.status = DeploymentStatus.DEPLOYING
self.store()
build = self.path / "deployment"
run_deployment(build_dir=build, detach=True)
- self.status = Status.DEPLOYED
+ self.status = DeploymentStatus.DEPLOYED
self.store()
- return self.json
-
- def delete(self, data: StopDeployment) -> DeploymentType:
- """Delete deployment."""
- build_dir = self.path / "deployment"
- if self.status == self.status:
- self.status = Status.STOPPING
- self.store()
- stop_deployment(build_dir=build_dir)
+ def stop(self) -> None:
+ """Stop the deployment."""
+ if self.status != DeploymentStatus.DEPLOYED:
+ return
- if data.get("delete", False):
- shutil.rmtree(build_dir)
- self.status = Status.CREATED
- self.nodes = {"agent": [], "tendermint": []}
- else:
- self.status = Status.BUILT
+ self.status = DeploymentStatus.STOPPING
self.store()
- return self.json
- def store(self) -> None:
- """Dump deployment config."""
- (self.path / DEPLOYMENT_JSON).write_text(
- json.dumps(self.json, indent=4),
- encoding="utf-8",
- )
+ # Stop the docker deployment
+ stop_deployment(build_dir=self.path / "deployment")
- @classmethod
- def load(cls, path: Path) -> "Deployment":
- """Load service from path."""
- file = path / DEPLOYMENT_JSON
- if file.exists():
- config = json.loads(file.read_text(encoding="utf-8"))
- return cls(
- status=Status(config["status"]),
- nodes=config["nodes"],
- path=path,
- )
- return cls(
- status=Status.CREATED,
- nodes={"agent": [], "tendermint": []},
- path=path,
- )
+ self.status = DeploymentStatus.BUILT
+ self.store()
+ def delete(self) -> None:
+ """Delete the deployment."""
+ shutil.rmtree(self.path / "deployment")
+ self.status = DeploymentStatus.DELETED
+ self.store()
-class ServiceHelper:
- """Service config helper."""
- def __init__(self, path: Path) -> None:
- """Initialize object."""
- self.path = path
- self.config = load_service_config(service_path=path)
+@dataclass
+class Service(LocalResource):
+ """Service class."""
- def ledger_config(self) -> LedgerConfig:
- """Get ledger config."""
- # TODO: Multiledger/Multiagent support
- for override in self.config.overrides:
- if (
- override["type"] == "connection"
- and "valory/ledger" in override["public_id"]
- ):
- (_, config), *_ = override["config"]["ledger_apis"].items()
- return LedgerConfig(
- rpc=config["address"],
- chain=ChainType.from_id(cid=config["chain_id"]),
- type=LedgerType.ETHEREUM,
- )
- raise ValueError("No ledger config found.")
+ hash: str
+ keys: Keys
+ ledger_config: LedgerConfig
+ chain_data: OnChainData
- def deployment_config(self) -> DeploymentConfig:
- """Returns deployment config."""
- return DeploymentConfig(self.config.json.get("deployment", {})) # type: ignore
+ path: Path
+ service_path: Path
+ name: t.Optional[str] = None
-class Service(
- Resource[
- ServiceType,
- t.Dict,
- t.Dict,
- ServiceType,
- ServiceType,
- DeleteServicePayload,
- DeleteServiceResponse,
- ]
-):
- """Service class."""
+ _helper: t.Optional[ServiceHelper] = None
+ _deployment: t.Optional[Deployment] = None
- _helper: t.Optional[ServiceHelper]
-
- def __init__( # pylint: disable=too-many-arguments
- self,
- service_path: Path,
- phash: str,
- keys: KeysType,
- ledger: LedgerConfig,
- chain_data: ChainData,
- name: t.Optional[str] = None,
- ) -> None:
- """Initialize object."""
- super().__init__()
- self.name = name
- self.keys = keys
- self.hash = phash
- self.ledger = ledger
- self.service_path = service_path
- self.chain_data = chain_data or {}
- self.path = self.service_path.parent
- self._helper = None
+ _file = "config.json"
+
+ @classmethod
+ def load(cls, path: Path) -> "Service":
+ """Load a service"""
+ return super().load(path) # type: ignore
@property
def helper(self) -> ServiceHelper:
@@ -385,94 +348,59 @@ def helper(self) -> ServiceHelper:
self._helper = ServiceHelper(path=self.service_path)
return t.cast(ServiceHelper, self._helper)
+ @property
def deployment(self) -> Deployment:
"""Load deployment object for the service."""
- return Deployment.load(path=self.path)
-
- async def access(
- self,
- params: t.Dict,
- scope: Scope,
- receive: Receive,
- send: Send,
- ) -> None:
- """Access service resource."""
- scope["method"] = Deployment.action_to_method[
- Action.from_string(params.pop("action"))
- ]
- await Deployment.load(self.path)(scope=scope, receive=receive, send=send)
-
- @property
- def json(self) -> ServiceType:
- """Return json representation."""
- readme = self.service_path / "README.md"
- return ServiceType(
- {
- "name": str(self.name),
- "hash": self.hash,
- "keys": self.keys,
- "ledger": self.ledger,
- "chain_data": self.chain_data,
- "service_path": str(self.service_path),
- "readme": (
- readme.read_text(encoding="utf-8") if readme.exists() else ""
- ),
- }
- )
-
- def store(self) -> None:
- """Store current state."""
- (self.path / CONFIG).write_text(
- json.dumps(self.json, indent=4),
- encoding="utf-8",
- )
-
- @classmethod
- def load(cls, path: Path) -> "Service":
- """Load service from path."""
- config = json.loads((path / CONFIG).read_text(encoding="utf-8"))
- return cls(
- phash=config["hash"],
- keys=config["keys"],
- ledger=config["ledger"],
- chain_data=config.get("chain_data"),
- service_path=Path(config["service_path"]),
- name=config["name"],
- )
-
- @classmethod
- def new( # pylint: disable=too-many-arguments
- cls,
- path: Path,
- phash: str,
- keys: KeysType,
- ledger: LedgerConfig,
- chain_data: ChainData,
- name: t.Optional[str] = None,
+ if not (self.path / DEPLOYMENT_JSON).exists():
+ self._deployment = Deployment.new(path=self.path)
+ if self._deployment is None:
+ self._deployment = Deployment.load(path=self.path)
+ return t.cast(Deployment, self._deployment)
+
+ @staticmethod
+ def new(
+ hash: str,
+ keys: Keys,
+ rpc: str,
+ on_chain_user_params: OnChainUserParams,
+ storage: Path,
) -> "Service":
"""Create a new service."""
- service_path = path / phash
- service_path.mkdir()
- downloaded = IPFSTool().download(
- hash_id=phash,
- target_dir=service_path,
+ path = storage / hash
+ path.mkdir()
+ service_path = Path(
+ IPFSTool().download(
+ hash_id=hash,
+ target_dir=path,
+ )
)
- if name is None:
- with Path(downloaded, "service.yaml").open("r", encoding="utf-8") as fp:
- config, *_ = yaml_load_all(fp)
- name = config["author"] + "/" + config["name"]
- service = cls(
- phash=phash,
+ with (service_path / "service.yaml").open("r", encoding="utf-8") as fp:
+ config, *_ = yaml_load_all(fp)
+
+ ledger_config = ServiceHelper(path=service_path).ledger_config()
+ service = Service(
+ name=config["author"] + "/" + config["name"],
+ hash=hash,
keys=keys,
- chain_data=chain_data,
- ledger=ledger,
- service_path=Path(downloaded),
- name=name,
+ ledger_config=LedgerConfig(
+ rpc=rpc,
+ type=ledger_config.type,
+ chain=ledger_config.chain,
+ ),
+ chain_data=OnChainData(
+ instances=[],
+ token=-1,
+ multisig=DUMMY_MULTISIG,
+ staked=False,
+ on_chain_state=OnChainState.NOTMINTED,
+ user_params=on_chain_user_params,
+ ),
+ path=service_path.parent,
+ service_path=service_path,
)
service.store()
return service
- def delete(self, data: DeleteServicePayload) -> DeleteServiceResponse:
- """Delete service."""
+ def delete(self) -> None:
+ """Delete a service."""
shutil.rmtree(self.path)
- return DeleteServiceResponse({})
diff --git a/operate/types.py b/operate/types.py
index 7bf7967d8..5a7648ee9 100644
--- a/operate/types.py
+++ b/operate/types.py
@@ -21,8 +21,11 @@
import enum
import typing as t
+from dataclasses import dataclass
-from typing_extensions import NotRequired, TypedDict
+from typing_extensions import TypedDict
+
+from operate.resource import LocalResource
_ACTIONS = {
@@ -84,47 +87,65 @@ def from_id(cls, cid: int) -> "ChainType":
return cls(_CHAIN_NAME_TO_ENUM[_CHAIN_ID_TO_CHAIN_NAME[cid]])
-class ContractAddresses(TypedDict):
- """Contracts templates."""
+class Action(enum.IntEnum):
+ """Action payload."""
- service_manager: str
- service_registry: str
- service_registry_token_utility: str
- gnosis_safe_proxy_factory: str
- gnosis_safe_same_address_multisig: str
- multisend: str
+ STATUS = 0
+ BUILD = 1
+ DEPLOY = 2
+ STOP = 3
+ @classmethod
+ def from_string(cls, action: str) -> "Action":
+ """Load from string."""
+ return cls(_ACTIONS[action])
-class LedgerConfig(TypedDict):
- """Ledger config."""
- rpc: NotRequired[str]
- type: NotRequired[LedgerType]
- chain: NotRequired[ChainType]
+class DeploymentStatus(enum.IntEnum):
+ """Status payload."""
+ CREATED = 0
+ BUILT = 1
+ DEPLOYING = 2
+ DEPLOYED = 3
+ STOPPING = 4
+ STOPPED = 5
+ DELETED = 6
-LedgerConfigs = t.List[LedgerConfig]
+class OnChainState(enum.IntEnum):
+ """On-chain state."""
-class KeyType(TypedDict):
- """Key type."""
+ NOTMINTED = 0
+ MINTED = 1
+ ACTIVATED = 2
+ REGISTERED = 3
+ DEPLOYED = 4
+ TERMINATED = 5
+ UNBONDED = 6
- address: str
- private_key: str
- ledger: ChainType
+class ContractAddresses(TypedDict):
+ """Contracts templates."""
-KeysType = t.List[KeyType]
+ service_manager: str
+ service_registry: str
+ service_registry_token_utility: str
+ gnosis_safe_proxy_factory: str
+ gnosis_safe_same_address_multisig: str
+ multisend: str
-class VariableType(TypedDict):
- """Variable type."""
+@dataclass
+class LedgerConfig(LocalResource):
+ """Ledger config."""
- key: str
- value: str
+ rpc: str
+ type: LedgerType
+ chain: ChainType
-VariablesType = t.List[VariableType]
+LedgerConfigs = t.List[LedgerConfig]
class ServiceState(enum.IntEnum):
@@ -138,46 +159,12 @@ class ServiceState(enum.IntEnum):
TERMINATED_BONDED = 5
-class ChainData(TypedDict):
- """Chain data for service."""
-
- instances: NotRequired[t.List[str]] # Agent instances registered as safe owners
- token: NotRequired[int]
- multisig: NotRequired[str]
- staked: NotRequired[bool]
-
-
-class ChainDeployment(TypedDict):
- """Chain deployment template."""
-
- nft: str
- agent_id: int
- cost_of_bond: int
- threshold: int
- required_funds: float
-
-
class DeploymentConfig(TypedDict):
"""Deployments template."""
volumes: t.Dict[str, str]
-class ServiceType(TypedDict):
- """Service payload."""
-
- name: str
- hash: str
- keys: KeysType
- readme: NotRequired[str]
- ledger: NotRequired[LedgerConfig]
- chain_data: NotRequired[ChainData]
- service_path: NotRequired[str]
-
-
-ServicesType = t.List[ServiceType]
-
-
class FundRequirementsTemplate(TypedDict):
"""Fund requirement template."""
@@ -209,41 +196,48 @@ class ServiceTemplate(TypedDict):
configuration: ConfigurationTemplate
-class Action(enum.IntEnum):
- """Action payload."""
+@dataclass
+class DeployedNodes(LocalResource):
+ """Deployed nodes type."""
- STATUS = 0
- BUILD = 1
- DEPLOY = 2
- STOP = 3
+ agent: t.List[str]
+ tendermint: t.List[str]
- @classmethod
- def from_string(cls, action: str) -> "Action":
- """Load from string."""
- return cls(_ACTIONS[action])
+@dataclass
+class OnChainFundRequirements(LocalResource):
+ """On-chain fund requirements."""
-class Status(enum.IntEnum):
- """Status payload."""
+ agent: float
+ safe: float
- CREATED = 0
- BUILT = 1
- DEPLOYING = 2
- DEPLOYED = 3
- STOPPING = 4
- STOPPED = 5
- DELETED = 6
+@dataclass
+class OnChainUserParams(LocalResource):
+ """On-chain user params."""
-class DeployedNodes(TypedDict):
- """Deployed nodes type."""
+ nft: str
+ agent_id: int
+ threshold: int
+ use_staking: bool
+ cost_of_bond: int
+ olas_cost_of_bond: int
+ olas_required_to_stake: int
+ fund_requirements: OnChainFundRequirements
- agent: t.List[str]
- tendermint: t.List[str]
+ @classmethod
+ def from_json(cls, obj: t.Dict) -> "OnChainUserParams":
+ """Load a service"""
+ return super().from_json(obj) # type: ignore
-class DeploymentType(TypedDict):
- """Deployment type."""
+@dataclass
+class OnChainData(LocalResource):
+ """On-chain data"""
- status: Status
- nodes: DeployedNodes
+ instances: t.List[str] # Agent instances registered as safe owners
+ token: int
+ multisig: str
+ staked: bool
+ on_chain_state: OnChainState
+ user_params: OnChainUserParams
diff --git a/poetry.lock b/poetry.lock
index 123b35517..2145ecb05 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,9 +1,10 @@
-# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand.
[[package]]
name = "aiohttp"
version = "3.9.3"
description = "Async http client/server framework (asyncio)"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -100,6 +101,7 @@ speedups = ["Brotli", "aiodns", "brotlicffi"]
name = "aiosignal"
version = "1.3.1"
description = "aiosignal: a list of registered asynchronous callbacks"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -110,10 +112,26 @@ files = [
[package.dependencies]
frozenlist = ">=1.1.0"
+[[package]]
+name = "annotated-types"
+version = "0.6.0"
+description = "Reusable constraint types to use with typing.Annotated"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
+ {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""}
+
[[package]]
name = "anyio"
version = "4.3.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -136,6 +154,7 @@ trio = ["trio (>=0.23)"]
name = "async-timeout"
version = "4.0.3"
description = "Timeout context manager for asyncio programs"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -147,6 +166,7 @@ files = [
name = "attrs"
version = "23.2.0"
description = "Classes Without Boilerplate"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -166,6 +186,7 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p
name = "backoff"
version = "2.2.1"
description = "Function decoration for backoff and retry"
+category = "main"
optional = false
python-versions = ">=3.7,<4.0"
files = [
@@ -177,6 +198,7 @@ files = [
name = "base58"
version = "2.1.1"
description = "Base58 and Base58Check implementation."
+category = "main"
optional = false
python-versions = ">=3.5"
files = [
@@ -191,6 +213,7 @@ tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", "
name = "bcrypt"
version = "4.1.2"
description = "Modern password hashing for your software and your servers"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -231,6 +254,7 @@ typecheck = ["mypy"]
name = "bitarray"
version = "2.9.2"
description = "efficient arrays of booleans -- C extension"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -362,6 +386,7 @@ files = [
name = "certifi"
version = "2024.2.2"
description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -373,6 +398,7 @@ files = [
name = "cffi"
version = "1.16.0"
description = "Foreign Function Interface for Python calling C code."
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -437,6 +463,7 @@ pycparser = "*"
name = "charset-normalizer"
version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
+category = "main"
optional = false
python-versions = ">=3.6.0"
files = [
@@ -451,6 +478,7 @@ unicode-backport = ["unicodedata2"]
name = "clea"
version = "0.1.0rc4"
description = "Framework for writing CLI application quickly"
+category = "main"
optional = false
python-versions = ">=3.8,<4.0"
files = [
@@ -465,6 +493,7 @@ typing-extensions = ">=4.7.1,<5.0.0"
name = "click"
version = "8.0.2"
description = "Composable command line interface toolkit"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -479,6 +508,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""}
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
+category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
@@ -490,6 +520,7 @@ files = [
name = "coverage"
version = "7.4.3"
description = "Code coverage measurement for Python"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -554,6 +585,7 @@ toml = ["tomli"]
name = "cryptography"
version = "42.0.5"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -608,6 +640,7 @@ test-randomorder = ["pytest-randomly"]
name = "cytoolz"
version = "0.12.3"
description = "Cython implementation of Toolz: High performance functional utilities"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -727,6 +760,7 @@ cython = ["cython"]
name = "distro"
version = "1.9.0"
description = "Distro - an OS platform information API"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -738,6 +772,7 @@ files = [
name = "docker"
version = "6.1.2"
description = "A Python library for the Docker Engine API."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -760,6 +795,7 @@ ssh = ["paramiko (>=2.4.3)"]
name = "dockerpty"
version = "0.4.1"
description = "Python library to use the pseudo-tty of a docker container"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -773,6 +809,7 @@ six = ">=1.3.0"
name = "docopt"
version = "0.6.2"
description = "Pythonic argument parser, that will make you smile"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -783,6 +820,7 @@ files = [
name = "ecdsa"
version = "0.16.1"
description = "ECDSA cryptographic signature library (pure python)"
+category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
@@ -801,6 +839,7 @@ gmpy2 = ["gmpy2"]
name = "eth-abi"
version = "5.0.0"
description = "eth_abi: Python utilities for working with Ethereum ABI definitions, especially encoding and decoding"
+category = "main"
optional = false
python-versions = ">=3.8, <4"
files = [
@@ -823,6 +862,7 @@ tools = ["hypothesis (>=4.18.2,<5.0.0)"]
name = "eth-account"
version = "0.8.0"
description = "eth-account: Sign Ethereum transactions and messages with local private keys"
+category = "main"
optional = false
python-versions = ">=3.6, <4"
files = [
@@ -850,6 +890,7 @@ test = ["coverage", "hypothesis (>=4.18.0,<5)", "pytest (>=6.2.5,<7)", "pytest-x
name = "eth-hash"
version = "0.6.0"
description = "eth-hash: The Ethereum hashing function, keccak256, sometimes (erroneously) called sha3"
+category = "main"
optional = false
python-versions = ">=3.8, <4"
files = [
@@ -871,6 +912,7 @@ test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
name = "eth-keyfile"
version = "0.6.1"
description = "A library for handling the encrypted keyfiles used to store ethereum private keys."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -893,6 +935,7 @@ test = ["pytest (>=6.2.5,<7)"]
name = "eth-keys"
version = "0.4.0"
description = "Common API for Ethereum key operations."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -915,6 +958,7 @@ test = ["asn1tools (>=0.146.2,<0.147)", "eth-hash[pycryptodome]", "eth-hash[pysh
name = "eth-rlp"
version = "0.3.0"
description = "eth-rlp: RLP definitions for common Ethereum objects in Python"
+category = "main"
optional = false
python-versions = ">=3.7, <4"
files = [
@@ -937,6 +981,7 @@ test = ["eth-hash[pycryptodome]", "pytest (>=6.2.5,<7)", "pytest-xdist", "tox (=
name = "eth-typing"
version = "3.5.2"
description = "eth-typing: Common type annotations for ethereum python packages"
+category = "main"
optional = false
python-versions = ">=3.7.2, <4"
files = [
@@ -957,6 +1002,7 @@ test = ["pytest (>=7.0.0)", "pytest-xdist (>=2.4.0)"]
name = "eth-utils"
version = "2.3.1"
description = "eth-utils: Common utility functions for python code that interacts with Ethereum"
+category = "main"
optional = false
python-versions = ">=3.7,<4"
files = [
@@ -980,6 +1026,7 @@ test = ["hypothesis (>=4.43.0)", "mypy (==0.971)", "pytest (>=7.0.0)", "pytest-x
name = "exceptiongroup"
version = "1.2.0"
description = "Backport of PEP 654 (exception groups)"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -990,10 +1037,31 @@ files = [
[package.extras]
test = ["pytest (>=6)"]
+[[package]]
+name = "fastapi"
+version = "0.110.0"
+description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "fastapi-0.110.0-py3-none-any.whl", hash = "sha256:87a1f6fb632a218222c5984be540055346a8f5d8a68e8f6fb647b1dc9934de4b"},
+ {file = "fastapi-0.110.0.tar.gz", hash = "sha256:266775f0dcc95af9d3ef39bad55cff525329a931d5fd51930aadd4f428bf7ff3"},
+]
+
+[package.dependencies]
+pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
+starlette = ">=0.36.3,<0.37.0"
+typing-extensions = ">=4.8.0"
+
+[package.extras]
+all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+
[[package]]
name = "flask"
version = "2.1.3"
description = "A simple framework for building complex web applications."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1003,6 +1071,7 @@ files = [
[package.dependencies]
click = ">=8.0"
+importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""}
itsdangerous = ">=2.0"
Jinja2 = ">=3.0"
Werkzeug = ">=2.0"
@@ -1015,6 +1084,7 @@ dotenv = ["python-dotenv"]
name = "frozenlist"
version = "1.4.1"
description = "A list-like structure which implements collections.abc.MutableSequence"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1101,6 +1171,7 @@ files = [
name = "gql"
version = "3.5.0"
description = "GraphQL client for Python"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1129,6 +1200,7 @@ websockets = ["websockets (>=10,<12)"]
name = "graphql-core"
version = "3.2.3"
description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL."
+category = "main"
optional = false
python-versions = ">=3.6,<4"
files = [
@@ -1140,6 +1212,7 @@ files = [
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1151,6 +1224,7 @@ files = [
name = "hexbytes"
version = "0.3.1"
description = "hexbytes: Python `bytes` subclass that decodes hex, with a readable console output"
+category = "main"
optional = false
python-versions = ">=3.7, <4"
files = [
@@ -1168,6 +1242,7 @@ test = ["eth-utils (>=1.0.1,<3)", "hypothesis (>=3.44.24,<=6.31.6)", "pytest (>=
name = "idna"
version = "3.6"
description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
optional = false
python-versions = ">=3.5"
files = [
@@ -1175,10 +1250,50 @@ files = [
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
]
+[[package]]
+name = "importlib-metadata"
+version = "7.0.2"
+description = "Read metadata from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_metadata-7.0.2-py3-none-any.whl", hash = "sha256:f4bc4c0c070c490abf4ce96d715f68e95923320370efb66143df00199bb6c100"},
+ {file = "importlib_metadata-7.0.2.tar.gz", hash = "sha256:198f568f3230878cb1b44fbd7975f87906c22336dba2e4a7f05278c281fbd792"},
+]
+
+[package.dependencies]
+zipp = ">=0.5"
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+perf = ["ipython"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+
+[[package]]
+name = "importlib-resources"
+version = "6.3.1"
+description = "Read resources from Python packages"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "importlib_resources-6.3.1-py3-none-any.whl", hash = "sha256:4811639ca7fa830abdb8e9ca0a104dc6ad13de691d9fe0d3173a71304f068159"},
+ {file = "importlib_resources-6.3.1.tar.gz", hash = "sha256:29a3d16556e330c3c8fb8202118c5ff41241cc34cbfb25989bbad226d99b7995"},
+]
+
+[package.dependencies]
+zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["jaraco.collections", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)", "zipp (>=3.17)"]
+
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1190,6 +1305,7 @@ files = [
name = "ipfshttpclient"
version = "0.8.0a2"
description = "Python IPFS HTTP CLIENT library"
+category = "main"
optional = false
python-versions = ">=3.6.2,!=3.7.0,!=3.7.1"
files = [
@@ -1205,6 +1321,7 @@ requests = ">=2.11"
name = "itsdangerous"
version = "2.1.2"
description = "Safely pass data to untrusted environments and back."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1216,6 +1333,7 @@ files = [
name = "jinja2"
version = "3.1.3"
description = "A very fast and expressive template engine."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1233,6 +1351,7 @@ i18n = ["Babel (>=2.7)"]
name = "jsonschema"
version = "4.3.3"
description = "An implementation of JSON Schema validation for Python"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1242,6 +1361,7 @@ files = [
[package.dependencies]
attrs = ">=17.4.0"
+importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""}
pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2"
[package.extras]
@@ -1252,6 +1372,7 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-
name = "lru-dict"
version = "1.2.0"
description = "An Dict like LRU container."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1346,6 +1467,7 @@ test = ["pytest"]
name = "markupsafe"
version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1415,6 +1537,7 @@ files = [
name = "morphys"
version = "1.0"
description = "Smart conversions between unicode and bytes types for common cases"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1425,6 +1548,7 @@ files = [
name = "multiaddr"
version = "0.0.9"
description = "Python implementation of jbenet's multiaddr"
+category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"
files = [
@@ -1442,6 +1566,7 @@ varint = "*"
name = "multidict"
version = "6.0.5"
description = "multidict implementation"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1541,6 +1666,7 @@ files = [
name = "netaddr"
version = "1.2.1"
description = "A network address manipulation library for Python"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1555,6 +1681,7 @@ nicer-shell = ["ipython"]
name = "open-aea"
version = "1.48.0.post1"
description = "Open Autonomous Economic Agent framework (without vendor lock-in)"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1594,6 +1721,7 @@ test-tools = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "jsonschema (>=4.3
name = "open-aea-cli-ipfs"
version = "1.48.0.post1"
description = "CLI extension for open AEA framework wrapping IPFS functionality."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1610,6 +1738,7 @@ pytest = ">=7.0.0,<7.3.0"
name = "open-aea-ledger-ethereum"
version = "1.48.0.post1"
description = "Python package wrapping the public and private key cryptography and ledger api of Ethereum."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1627,6 +1756,7 @@ web3 = ">=6.0.0,<7"
name = "open-autonomy"
version = "0.14.7"
description = "A framework for the creation of autonomous agent services."
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1662,6 +1792,7 @@ cli = ["click (==8.0.2)", "coverage (>=6.4.4,<8.0.0)", "open-aea-cli-ipfs (==1.4
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1673,6 +1804,7 @@ files = [
name = "paramiko"
version = "3.4.0"
description = "SSH2 protocol library"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1694,6 +1826,7 @@ invoke = ["invoke (>=2.0)"]
name = "parsimonious"
version = "0.9.0"
description = "(Soon to be) the fastest pure-Python PEG parser I could muster"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1707,6 +1840,7 @@ regex = ">=2022.3.15"
name = "pluggy"
version = "1.4.0"
description = "plugin and hook calling mechanisms for python"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1722,6 +1856,7 @@ testing = ["pytest", "pytest-benchmark"]
name = "protobuf"
version = "4.24.4"
description = ""
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1744,6 +1879,7 @@ files = [
name = "py-multibase"
version = "1.0.3"
description = "Multibase implementation for Python"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1760,6 +1896,7 @@ six = ">=1.10.0,<2.0"
name = "py-multicodec"
version = "0.2.1"
description = "Multicodec implementation in Python"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1776,6 +1913,7 @@ varint = ">=1.0.2,<2.0.0"
name = "pycparser"
version = "2.21"
description = "C parser in Python"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
@@ -1787,6 +1925,7 @@ files = [
name = "pycryptodome"
version = "3.20.0"
description = "Cryptographic library for Python"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
files = [
@@ -1824,10 +1963,123 @@ files = [
{file = "pycryptodome-3.20.0.tar.gz", hash = "sha256:09609209ed7de61c2b560cc5c8c4fbf892f8b15b1faf7e4cbffac97db1fffda7"},
]
+[[package]]
+name = "pydantic"
+version = "2.6.4"
+description = "Data validation using Python type hints"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"},
+ {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"},
+]
+
+[package.dependencies]
+annotated-types = ">=0.4.0"
+pydantic-core = "2.16.3"
+typing-extensions = ">=4.6.1"
+
+[package.extras]
+email = ["email-validator (>=2.0.0)"]
+
+[[package]]
+name = "pydantic-core"
+version = "2.16.3"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"},
+ {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"},
+ {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"},
+ {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"},
+ {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"},
+ {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"},
+ {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"},
+ {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"},
+ {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"},
+ {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"},
+ {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"},
+ {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"},
+ {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"},
+ {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"},
+ {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"},
+ {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"},
+ {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"},
+ {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"},
+ {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"},
+ {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"},
+ {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
+
[[package]]
name = "pymultihash"
version = "0.8.2"
description = "Python implementation of the multihash specification"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1843,6 +2095,7 @@ sha3 = ["pysha3"]
name = "pynacl"
version = "1.5.0"
description = "Python binding to the Networking and Cryptography (NaCl) library"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1869,6 +2122,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"]
name = "pyrsistent"
version = "0.20.0"
description = "Persistent/Functional/Immutable data structures"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -1910,6 +2164,7 @@ files = [
name = "pytest"
version = "7.2.1"
description = "pytest: simple powerful testing with Python"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -1933,6 +2188,7 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.
name = "python-baseconv"
version = "1.2.2"
description = "Convert numbers from base 10 integers to base X strings and back again."
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1943,6 +2199,7 @@ files = [
name = "python-dotenv"
version = "0.17.1"
description = "Read key-value pairs from a .env file and set them as environment variables"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1957,6 +2214,7 @@ cli = ["click (>=5.0)"]
name = "pyunormalize"
version = "15.1.0"
description = "Unicode normalization forms (NFC, NFKC, NFD, NFKD). A library independent from the Python core Unicode database."
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1967,6 +2225,7 @@ files = [
name = "pywin32"
version = "306"
description = "Python for Window Extensions"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -1990,6 +2249,7 @@ files = [
name = "pyyaml"
version = "6.0.1"
description = "YAML parser and emitter for Python"
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -1998,7 +2258,6 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -2006,16 +2265,8 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -2032,7 +2283,6 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -2040,7 +2290,6 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -2050,6 +2299,7 @@ files = [
name = "regex"
version = "2023.12.25"
description = "Alternative regular expression module, to replace re."
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -2152,6 +2402,7 @@ files = [
name = "requests"
version = "2.28.1"
description = "Python HTTP for Humans."
+category = "main"
optional = false
python-versions = ">=3.7, <4"
files = [
@@ -2173,6 +2424,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
name = "requests-toolbelt"
version = "1.0.0"
description = "A utility belt for advanced users of python-requests"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
@@ -2187,6 +2439,7 @@ requests = ">=2.0.1,<3.0.0"
name = "rlp"
version = "3.0.0"
description = "A package for Recursive Length Prefix encoding and decoding"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -2208,6 +2461,7 @@ test = ["hypothesis (==5.19.0)", "pytest (>=6.2.5,<7)", "tox (>=2.9.1,<3)"]
name = "semver"
version = "2.13.0"
description = "Python helper for Semantic Versioning (http://semver.org/)"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
@@ -2219,6 +2473,7 @@ files = [
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
@@ -2230,6 +2485,7 @@ files = [
name = "sniffio"
version = "1.3.1"
description = "Sniff out which async library your code is running under"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -2239,17 +2495,19 @@ files = [
[[package]]
name = "starlette"
-version = "0.37.1"
+version = "0.36.3"
description = "The little ASGI library that shines."
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
- {file = "starlette-0.37.1-py3-none-any.whl", hash = "sha256:92a816002d4e8c552477b089520e3085bb632e854eb32cef99acb6f6f7830b69"},
- {file = "starlette-0.37.1.tar.gz", hash = "sha256:345cfd562236b557e76a045715ac66fdc355a1e7e617b087834a76a87dcc6533"},
+ {file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"},
+ {file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"},
]
[package.dependencies]
anyio = ">=3.4.0,<5"
+typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""}
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
@@ -2258,6 +2516,7 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7
name = "texttable"
version = "1.6.7"
description = "module to create simple ASCII tables"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -2269,6 +2528,7 @@ files = [
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -2280,6 +2540,7 @@ files = [
name = "toolz"
version = "0.12.1"
description = "List processing tools and functional utilities"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -2291,6 +2552,7 @@ files = [
name = "typing-extensions"
version = "4.10.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -2302,6 +2564,7 @@ files = [
name = "urllib3"
version = "1.26.18"
description = "HTTP library with thread-safe connection pooling, file post, and more."
+category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
@@ -2318,6 +2581,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
name = "uvicorn"
version = "0.27.1"
description = "The lightning-fast ASGI server."
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -2337,6 +2601,7 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)",
name = "valory-docker-compose"
version = "1.29.3"
description = "Multi-container orchestration for Docker"
+category = "main"
optional = false
python-versions = ">=3.4"
files = [
@@ -2365,6 +2630,7 @@ tests = ["ddt (>=1.2.2,<2)", "pytest (<6)"]
name = "varint"
version = "1.0.2"
description = "Simple python varint implementation"
+category = "main"
optional = false
python-versions = "*"
files = [
@@ -2375,6 +2641,7 @@ files = [
name = "watchdog"
version = "4.0.0"
description = "Filesystem events monitoring"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -2416,6 +2683,7 @@ watchmedo = ["PyYAML (>=3.10)"]
name = "web3"
version = "6.15.1"
description = "web3.py"
+category = "main"
optional = false
python-versions = ">=3.7.2"
files = [
@@ -2451,6 +2719,7 @@ tester = ["eth-tester[py-evm] (==v0.9.1-b.2)", "py-geth (>=3.14.0)"]
name = "websocket-client"
version = "0.59.0"
description = "WebSocket client for Python with low level API options"
+category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
files = [
@@ -2465,6 +2734,7 @@ six = "*"
name = "websockets"
version = "12.0"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+category = "main"
optional = false
python-versions = ">=3.8"
files = [
@@ -2546,6 +2816,7 @@ files = [
name = "werkzeug"
version = "2.0.3"
description = "The comprehensive WSGI web application library."
+category = "main"
optional = false
python-versions = ">=3.6"
files = [
@@ -2560,6 +2831,7 @@ watchdog = ["watchdog"]
name = "yarl"
version = "1.9.4"
description = "Yet another URL library"
+category = "main"
optional = false
python-versions = ">=3.7"
files = [
@@ -2659,7 +2931,23 @@ files = [
idna = ">=2.0"
multidict = ">=4.0"
+[[package]]
+name = "zipp"
+version = "3.18.1"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+category = "main"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"},
+ {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+
[metadata]
lock-version = "2.0"
-python-versions = "^3.10"
-content-hash = "9a2686ee17b595989faa59157c0246e7d012c93f26331e4c85853d0645d1a1e6"
+python-versions = "<4.0,>=3.8"
+content-hash = "45cea12f58880b9cac0df37d3438c9cb71ae22e4d5f2ed0c451b6eba36feeb88"
diff --git a/pyproject.toml b/pyproject.toml
index 25f4ca2e5..87883424f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -26,6 +26,7 @@ clea = ">=0.1.0rc4"
starlette = ">=0.36.3"
uvicorn = ">=0.27.0"
requests-toolbelt = "1.0.0"
+fastapi = ">=0.110.0"
[build-system]
requires = ["poetry-core"]
diff --git a/scripts/test_e2e.py b/scripts/test_e2e.py
index 1741a23dd..b1a0b29d2 100644
--- a/scripts/test_e2e.py
+++ b/scripts/test_e2e.py
@@ -36,27 +36,22 @@ def test_endpoint_e2e() -> None:
phash = trader_template["hash"]
print("Creating service using template")
+
response = requests.post(
url=f"{BASE_URL}/services",
- json=trader_template,
+ json={**trader_template, "deploy": True},
).json()
print(response)
- input("> Press enter to start")
- print(
- requests.get(
- url=f"{BASE_URL}/services/{phash}/deploy/",
- ).content.decode()
- )
-
input("> Press enter to stop")
print(
- requests.get(
- url=f"{BASE_URL}/services/{phash}/stop/",
+ requests.post(
+ url=f"{BASE_URL}/services/{phash}/deployment/stop",
).content.decode()
)
input("> Press enter to update")
+
# Fund agent instance for swapping
ledger_api = EthereumApi(address="http://localhost:8545")
crypto = EthereumCrypto(".operate/key")
@@ -73,20 +68,25 @@ def test_endpoint_e2e() -> None:
digest = ledger_api.send_signed_transaction(stx)
ledger_api.get_transaction_receipt(tx_digest=digest)
- old = trader_template["hash"]
- trader_template[
- "hash"
- ] = "bafybeicxdpkuk5z5zfbkso7v5pywf4v7chxvluyht7dtgalg6dnhl7ejoe"
+ new_hash = "bafybeicxdpkuk5z5zfbkso7v5pywf4v7chxvluyht7dtgalg6dnhl7ejoe"
print(
requests.put(
url=f"{BASE_URL}/services",
json={
- "old": old,
- "new": trader_template,
+ "old_service_hash": trader_template["hash"],
+ "new_service_hash": new_hash,
+ "deploy": True,
},
).content.decode()
)
+ input("> Press enter to stop")
+ print(
+ requests.post(
+ url=f"{BASE_URL}/services/{new_hash}/deployment/stop",
+ ).content.decode()
+ )
+
if __name__ == "__main__":
test_endpoint_e2e()
diff --git a/tox.ini b/tox.ini
index 6e3735f1f..270ed0ff1 100644
--- a/tox.ini
+++ b/tox.ini
@@ -192,6 +192,8 @@ ignore_missing_imports = True
[mypy-clea.*]
ignore_missing_imports = True
-
[mypy-uvicorn.*]
-ignore_missing_imports = True
\ No newline at end of file
+ignore_missing_imports = True
+
+[mypy-fastapi.*]
+ignore_missing_imports = True