diff --git a/.ci/tests/examples/configure.sh b/.ci/tests/examples/configure.sh index 285374fb6..b3fb9d678 100755 --- a/.ci/tests/examples/configure.sh +++ b/.ci/tests/examples/configure.sh @@ -3,7 +3,7 @@ set -e # Parse example name if [ "$#" -ne 2 ]; then - >&2 echo "Wrong number of arguments (usage: run.sh )" + >&2 echo "Wrong number of arguments (usage: configure.sh )" exit 1 fi example="$1" diff --git a/.ci/tests/examples/inference_test.py b/.ci/tests/examples/inference_test.py new file mode 100644 index 000000000..6e27d2499 --- /dev/null +++ b/.ci/tests/examples/inference_test.py @@ -0,0 +1,35 @@ +import sys +from time import sleep + +import pymongo + +N_CLIENTS = 2 +RETRIES = 18 +SLEEP = 10 + + +def _eprint(*args, **kwargs): + print(*args, file=sys.stderr, **kwargs) + + +def _wait_n_rounds(collection): + n = 0 + for _ in range(RETRIES): + query = {'type': 'INFERENCE'} + n = collection.count_documents(query) + if n == N_CLIENTS: + return n + _eprint(f'Succeeded clients: {n}. Sleeping for {SLEEP}.') + sleep(SLEEP) + _eprint(f'Succeeded clients: {n}. Giving up.') + return n + + +if __name__ == '__main__': + # Connect to mongo + client = pymongo.MongoClient("mongodb://fedn_admin:password@localhost:6534") + + # Wait for all clients to report a successful inference + succeeded = _wait_n_rounds(client['fedn-network']['control']['status']) + assert(succeeded == N_CLIENTS) # check that all inference clients succeeded + _eprint(f'Succeeded inference clients: {succeeded}. Test passed.') diff --git a/.ci/tests/examples/run_inference.sh b/.ci/tests/examples/run_inference.sh new file mode 100755 index 000000000..d78771d70 --- /dev/null +++ b/.ci/tests/examples/run_inference.sh @@ -0,0 +1,19 @@ +#!/bin/bash +set -e + +# Parse example name +if [ "$#" -lt 1 ]; then + >&2 echo "Wrong number of arguments (usage: run_inference.sh )" + exit 1 +fi +example="$1" + +>&2 echo "Run inference" +pushd "examples/$example" +curl -k -X POST https://localhost:8090/infer + +>&2 echo "Checking inference success" +".$example/bin/python" ../../.ci/tests/examples/inference_test.py + +>&2 echo "Test completed successfully" +popd \ No newline at end of file diff --git a/.ci/tests/examples/wait_for.py b/.ci/tests/examples/wait_for.py index 20454cf2e..7fa75506d 100644 --- a/.ci/tests/examples/wait_for.py +++ b/.ci/tests/examples/wait_for.py @@ -29,7 +29,7 @@ def _retry(try_func, **func_args): def _test_rounds(n_rounds): client = pymongo.MongoClient( "mongodb://fedn_admin:password@localhost:6534") - collection = client['fedn-test-network']['control']['round'] + collection = client['fedn-network']['control']['rounds'] query = {'reducer.status': 'Success'} n = collection.count_documents(query) client.close() diff --git a/.github/workflows/code-checks.yaml b/.github/workflows/code-checks.yaml index c1ec38548..c76c418f9 100644 --- a/.github/workflows/code-checks.yaml +++ b/.github/workflows/code-checks.yaml @@ -18,6 +18,8 @@ jobs: --skip .venv --skip .mnist-keras --skip .mnist-pytorch + --skip fedn_pb2.py + --skip fedn_pb2_grpc.py - name: check Python formatting run: > @@ -25,12 +27,14 @@ jobs: --exclude .venv --exclude .mnist-keras --exclude .mnist-pytorch + --exclude fedn_pb2.py + --exclude fedn_pb2_grpc.py . - name: run Python linter run: > .venv/bin/flake8 .
- --exclude ".venv,.mnist-keras,.mnist-pytorch,fedn_pb2.py" + --exclude ".venv,.mnist-keras,.mnist-pytorch,fedn_pb2.py,fedn_pb2_grpc.py" - name: check for floating imports run: > diff --git a/.github/workflows/integration-tests.yaml b/.github/workflows/integration-tests.yaml index d5c49a8f0..1b451975e 100644 --- a/.github/workflows/integration-tests.yaml +++ b/.github/workflows/integration-tests.yaml @@ -15,13 +15,12 @@ jobs: strategy: matrix: to_test: - - "mnist-keras keras" - - "mnist-pytorch pytorch" + - "mnist-keras kerashelper" + - "mnist-pytorch pytorchhelper" python_version: ["3.8", "3.9","3.10"] os: - ubuntu-20.04 - ubuntu-22.04 - - macos-11 runs-on: ${{ matrix.os }} steps: - name: checkout @@ -38,7 +37,10 @@ jobs: - name: run ${{ matrix.to_test }} run: .ci/tests/examples/run.sh ${{ matrix.to_test }} - if: ${{ matrix.os != 'macos-11' }} # skip Docker part for MacOS + + - name: run ${{ matrix.to_test }} inference + run: .ci/tests/examples/run_inference.sh ${{ matrix.to_test }} + if: ${{ matrix.os != 'macos-11' && matrix.to_test == 'mnist-keras kerashelper' }} # inference example only available for Keras - name: print logs if: failure() diff --git a/LICENSE b/LICENSE index ae263d310..a8b7d2c09 100644 --- a/LICENSE +++ b/LICENSE @@ -199,3 +199,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + diff --git a/config/settings-client.yaml.template b/config/settings-client.yaml.template index dbc370cd3..e4035f8d9 100644 --- a/config/settings-client.yaml.template +++ b/config/settings-client.yaml.template @@ -1,3 +1,3 @@ -network_id: fedn-test-network +network_id: fedn-network discover_host: reducer discover_port: 8090 diff --git a/config/settings-combiner.yaml.template b/config/settings-combiner.yaml.template index 39c8c5985..68deff143 100644 --- a/config/settings-combiner.yaml.template +++ b/config/settings-combiner.yaml.template @@ -1,10 +1,10 @@ -network_id: fedn-test-network -controller: - discover_host: reducer - discover_port: 8090 +network_id: fedn-network +discover_host: reducer +discover_port: 8090 + +name: combiner +host: combiner +port: 12080 +max_clients: 30 + -combiner: - name: combiner - host: combiner - port: 12080 - max_clients: 30 diff --git a/config/settings-reducer.yaml.template b/config/settings-reducer.yaml.template index 4f6009e4b..3289b656a 100644 --- a/config/settings-reducer.yaml.template +++ b/config/settings-reducer.yaml.template @@ -1,9 +1,4 @@ -network_id: fedn-test-network -token: fedn_token - -control: - state: idle - helper: keras +network_id: fedn-network statestore: type: MongoDB diff --git a/docker-compose.yaml b/docker-compose.yaml index 4529dc231..610d6e4c0 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -93,7 +93,7 @@ services: - ${HOST_REPO_DIR:-.}/fedn:/app/fedn entrypoint: [ "sh", "-c" ] command: - - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run combiner -in config/settings-combiner.yaml" + - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run combiner --init config/settings-combiner.yaml" ports: - 12080:12080 @@ -110,6 +110,6 @@ services: - ${HOST_REPO_DIR:-.}/fedn:/app/fedn entrypoint: [ "sh", "-c" ] command: - - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run client -in config/settings-client.yaml" + - "/venv/bin/pip install --no-cache-dir -e /app/fedn && /venv/bin/fedn run client --init config/settings-client.yaml" deploy: replicas: 0 diff --git
a/examples/mnist-keras/README.md b/examples/mnist-keras/README.md index fde0d3569..b4b5c0672 100644 --- a/examples/mnist-keras/README.md +++ b/examples/mnist-keras/README.md @@ -66,3 +66,22 @@ Finally, you can start the experiment from the "control" tab of the UI. ## Clean up You can clean up by running `docker-compose down`. + +## Connecting to a distributed deployment +To start and remotely connect a client with the required dependencies for this example, start by downloading the `client.yaml` file. You can either navigate to the reducer UI or run the following command. + +```bash +curl -k https://<reducer-fqdn>:<reducer-port>/config/download > client.yaml +``` +> **Note** Make sure to replace `<reducer-fqdn>` and `<reducer-port>` with appropriate values. + +Now you are ready to start the client via Docker by running the following command. + +```bash +docker run -d \ + -v $PWD/client.yaml:/app/client.yaml \ + -v $PWD/data:/var/data \ + -e ENTRYPOINT_OPTS=--data_path=/var/data/mnist.npz \ + ghcr.io/scaleoutsystems/fedn/fedn:develop-mnist-keras run client -in client.yaml +``` +> **Note** If the reducer and combiner host names, as specified in the configuration files, are not resolvable in the client host network, you need to use the Docker option `--add-host` to make them resolvable. Please refer to the Docker documentation for more details. diff --git a/examples/mnist-keras/bin/init_venv.sh b/examples/mnist-keras/bin/init_venv.sh index 65c860ec4..6856700a9 100755 --- a/examples/mnist-keras/bin/init_venv.sh +++ b/examples/mnist-keras/bin/init_venv.sh @@ -2,7 +2,7 @@ set -e # Init venv -python -m venv .mnist-keras +python3 -m venv .mnist-keras # Pip deps .mnist-keras/bin/pip install --upgrade pip diff --git a/examples/mnist-keras/client/entrypoint b/examples/mnist-keras/client/entrypoint index e9609f2ca..5b1d76c01 100755 --- a/examples/mnist-keras/client/entrypoint +++ b/examples/mnist-keras/client/entrypoint @@ -7,8 +7,9 @@ import fire import numpy as np import tensorflow as tf -from fedn.utils.kerashelper import KerasHelper +from fedn.utils.helpers import get_helper, save_metadata, save_metrics +HELPER_MODULE = 'kerashelper' NUM_CLASSES = 10 @@ -17,7 +18,6 @@ def _get_data_path(): client = docker.from_env() container = client.containers.get(os.environ['HOSTNAME']) number = container.name[-1] - # Return data path return f"/var/data/clients/{number}/mnist.npz" @@ -64,8 +64,8 @@ def _load_data(data_path, is_train=True): def init_seed(out_path='seed.npz'): weights = _compile_model().get_weights() - helper = KerasHelper() - helper.save_model(weights, out_path) + helper = get_helper(HELPER_MODULE) + helper.save(weights, out_path) def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1): @@ -74,8 +74,8 @@ def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1 # Load model model = _compile_model() - helper = KerasHelper() - weights = helper.load_model(in_model_path) + helper = get_helper(HELPER_MODULE) + weights = helper.load(in_model_path) model.set_weights(weights) # Train @@ -83,7 +83,17 @@ def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1 # Save weights = model.get_weights() - helper.save_model(weights, out_model_path) + helper.save(weights, out_model_path) + + # Metadata needed for aggregation server side + metadata = { + 'num_examples': len(x_train), + 'batch_size': batch_size, + 'epochs': epochs, + } + + # Save JSON metadata file + save_metadata(metadata, out_model_path) def validate(in_model_path, out_json_path, data_path=None): @@ -93,8 +103,8 @@ def
validate(in_model_path, out_json_path, data_path=None): # Load model model = _compile_model() - helper = KerasHelper() - weights = helper.load_model(in_model_path) + helper = get_helper(HELPER_MODULE) + weights = helper.load(in_model_path) model.set_weights(weights) # Evaluate @@ -111,9 +121,27 @@ def validate(in_model_path, out_json_path, data_path=None): "test_accuracy": model_score_test[1], } + # Save JSON + save_metrics(report, out_json_path) + + +def infer(in_model_path, out_json_path, data_path=None): + # Using test data for inference but another dataset could be loaded + x_test, _ = _load_data(data_path, is_train=False) + + # Load model + model = _compile_model() + helper = get_helper(HELPER_MODULE) + weights = helper.load(in_model_path) + model.set_weights(weights) + + # Infer + y_pred = model.predict(x_test) + y_pred = np.argmax(y_pred, axis=1) + # Save JSON with open(out_json_path, "w") as fh: - fh.write(json.dumps(report)) + fh.write(json.dumps({'predictions': y_pred.tolist()})) if __name__ == '__main__': @@ -121,5 +149,6 @@ if __name__ == '__main__': 'init_seed': init_seed, 'train': train, 'validate': validate, + 'infer': infer, '_get_data_path': _get_data_path, # for testing }) diff --git a/examples/mnist-keras/client/fedn.yaml b/examples/mnist-keras/client/fedn.yaml index 29c475270..91ec40c2a 100644 --- a/examples/mnist-keras/client/fedn.yaml +++ b/examples/mnist-keras/client/fedn.yaml @@ -2,4 +2,6 @@ entry_points: train: command: /venv/bin/python entrypoint train $ENTRYPOINT_OPTS validate: - command: /venv/bin/python entrypoint validate $ENTRYPOINT_OPTS \ No newline at end of file + command: /venv/bin/python entrypoint validate $ENTRYPOINT_OPTS + infer: + command: /venv/bin/python entrypoint infer $ENTRYPOINT_OPTS diff --git a/examples/mnist-pytorch/client/entrypoint b/examples/mnist-pytorch/client/entrypoint index 3a4189cd2..5b671f4b9 100755 --- a/examples/mnist-pytorch/client/entrypoint +++ b/examples/mnist-pytorch/client/entrypoint @@ -1,6 +1,5 @@ #!./.mnist-pytorch/bin/python import collections -import json import math import os @@ -8,8 +7,9 @@ import docker import fire import torch -from fedn.utils.pytorchhelper import PytorchHelper +from fedn.utils.helpers import get_helper, save_metadata, save_metrics +HELPER_MODULE = 'pytorchhelper' NUM_CLASSES = 10 @@ -69,13 +69,13 @@ def _save_model(model, out_path): weights_np = collections.OrderedDict() for w in weights: weights_np[w] = weights[w].cpu().detach().numpy() - helper = PytorchHelper() - helper.save_model(weights, out_path) + helper = get_helper(HELPER_MODULE) + helper.save(weights, out_path) def _load_model(model_path): - helper = PytorchHelper() - weights_np = helper.load_model(model_path) + helper = get_helper(HELPER_MODULE) + weights_np = helper.load(model_path) weights = collections.OrderedDict() for w in weights_np: weights[w] = torch.tensor(weights_np[w]) @@ -118,7 +118,18 @@ def train(in_model_path, out_model_path, data_path=None, batch_size=32, epochs=1 print( f"Epoch {e}/{epochs-1} | Batch: {b}/{n_batches-1} | Loss: {loss.item()}") - # Save + # Metadata needed for aggregation server side + metadata = { + 'num_examples': len(x_train), + 'batch_size': batch_size, + 'epochs': epochs, + 'lr': lr + } + + # Save JSON metadata file + save_metadata(metadata, out_model_path) + + # Save model update _save_model(model, out_model_path) @@ -151,8 +162,7 @@ def validate(in_model_path, out_json_path, data_path=None): } # Save JSON - with open(out_json_path, "w") as fh: - fh.write(json.dumps(report)) + save_metrics(report, 
out_json_path) if __name__ == '__main__': @@ -160,5 +170,5 @@ if __name__ == '__main__': 'init_seed': init_seed, 'train': train, 'validate': validate, - '_get_data_path': _get_data_path, # for testing + # '_get_data_path': _get_data_path, # for testing }) diff --git a/examples/mnist-pytorch/requirements.txt b/examples/mnist-pytorch/requirements.txt index 3eadc6248..0bf7a6e78 100644 --- a/examples/mnist-pytorch/requirements.txt +++ b/examples/mnist-pytorch/requirements.txt @@ -1,4 +1,4 @@ torch==1.13.1 torchvision==0.14.1 fire==0.3.1 -docker==6.1.1 \ No newline at end of file +docker==6.1.1 diff --git a/fedn/README.md b/fedn/README.md index cd59e8a1b..b90c5e510 100644 --- a/fedn/README.md +++ b/fedn/README.md @@ -1 +1 @@ -# FEDn SDk # +FEDn diff --git a/fedn/cli/run_cmd.py b/fedn/cli/run_cmd.py index 55f93eba8..bf49cd1f0 100644 --- a/fedn/cli/run_cmd.py +++ b/fedn/cli/run_cmd.py @@ -4,14 +4,13 @@ import click import yaml -from fedn.client import Client -from fedn.clients.reducer.restservice import (decode_auth_token, - encode_auth_token) -from fedn.clients.reducer.statestore.mongoreducerstatestore import \ - MongoReducerStateStore -from fedn.combiner import Combiner from fedn.common.exceptions import InvalidClientConfig -from fedn.reducer import Reducer +from fedn.network.clients.client import Client +from fedn.network.combiner.server import Combiner +from fedn.network.dashboard.restservice import (decode_auth_token, + encode_auth_token) +from fedn.network.reducer import Reducer +from fedn.network.statestore.mongostatestore import MongoStateStore from .main import main @@ -40,7 +39,7 @@ def check_helper_config_file(config): return helper -def parse_client_config(config): +def apply_config(config): """Parse client config from file. Override configs from the CLI with settings in config file. @@ -71,7 +70,7 @@ def validate_client_config(config): if 'discover_port' not in config.keys(): config['discover_port'] = None except Exception: - raise InvalidClientConfig("Could not load config appropriately. Check config") + raise InvalidClientConfig("Could not load config from file. 
Check config") @main.group('run') @@ -87,8 +86,8 @@ def run_cmd(ctx): @run_cmd.command('client') -@click.option('-d', '--discoverhost', required=False) -@click.option('-p', '--discoverport', required=False) +@click.option('-d', '--discoverhost', required=False, help='Hostname for discovery services(reducer).') +@click.option('-p', '--discoverport', required=False, help='Port for discovery services (reducer).') @click.option('--token', required=False, help='Set token provided by reducer if enabled') @click.option('-n', '--name', required=False, default="client" + str(uuid.uuid4())[:8]) @click.option('-i', '--client_id', required=False) @@ -138,7 +137,7 @@ def client_cmd(ctx, discoverhost, discoverport, token, name, client_id, local_pa 'reconnect_after_missed_heartbeat': reconnect_after_missed_heartbeat} if init: - parse_client_config(config) + apply_config(config) validate_client_config(config) @@ -152,7 +151,7 @@ def client_cmd(ctx, discoverhost, discoverport, token, name, client_id, local_pa @click.option('-k', '--secret-key', required=False, help='Set secret key to enable jwt token authentication.') @click.option('-l', '--local-package', is_flag=True, help='Enable use of local compute package') @click.option('-n', '--name', required=False, default="reducer" + str(uuid.uuid4())[:8], help='Set service name') -@click.option('-i', '--init', required=True, default=None, +@click.option('-in', '--init', required=True, default=None, help='Set to a filename to (re)init reducer state from file.') @click.pass_context def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): @@ -161,13 +160,13 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): :param ctx: :param discoverhost: :param discoverport: - :param token: + :param secret_key: :param name: :param init: """ remote = False if local_package else True config = {'host': host, 'port': port, 'secret_key': secret_key, - 'name': name, 'remote_compute_context': remote, 'init': init} + 'name': name, 'remote_compute_package': remote, 'init': init} # Read settings from config file try: @@ -189,7 +188,7 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): # Obtain state from database, in case already initialized (service restart) statestore_config = fedn_config['statestore'] if statestore_config['type'] == 'MongoDB': - statestore = MongoReducerStateStore( + statestore = MongoStateStore( network_id, statestore_config['mongo_config'], defaults=config['init']) else: print("Unsupported statestore type, exiting. ", flush=True) @@ -219,7 +218,7 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): print("Failed to set reducer config in statestore, exiting.", flush=True) exit(-1) - # Configure storage backend (currently supports MinIO) + # Configure storage backend. 
try: statestore.set_storage_backend(fedn_config['storage']) except KeyError: @@ -229,14 +228,6 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): print("Failed to set storage config in statestore, exiting.", flush=True) exit(-1) - # Configure controller - control_config = fedn_config['control'] - try: - statestore.set_round_config(control_config) - except Exception: - print("Failed to set control config, exiting.", flush=True) - exit(-1) - reducer = Reducer(statestore) reducer.run() @@ -244,17 +235,19 @@ def reducer_cmd(ctx, host, port, secret_key, local_package, name, init): @run_cmd.command('combiner') @click.option('-d', '--discoverhost', required=False, help='Hostname for discovery services (reducer).') @click.option('-p', '--discoverport', required=False, help='Port for discovery services (reducer).') -@click.option('-t', '--token', required=False, help='Specify token for connecting to the reducer.') +@click.option('-t', '--token', required=False, help='Set token provided by reducer if enabled') @click.option('-n', '--name', required=False, default="combiner" + str(uuid.uuid4())[:8], help='Set name for combiner.') @click.option('-h', '--host', required=False, default="combiner", help='Set hostname.') @click.option('-i', '--port', required=False, default=12080, help='Set port.') +@click.option('-f', '--fqdn', required=False, default=None, help='Set fully qualified domain name') @click.option('-s', '--secure', is_flag=True, help='Enable SSL/TLS encrypted gRPC channels.') -@click.option('-v', '--verify', is_flag=True, help='Verify SSL/TLS for REST service') +@click.option('-v', '--verify', is_flag=True, help='Verify SSL/TLS for REST discovery service (reducer)') @click.option('-c', '--max_clients', required=False, default=30, help='The maximal number of client connections allowed.') @click.option('-in', '--init', required=False, default=None, help='Path to configuration file to (re)init combiner.') +@click.option('-a', '--aggregator', required=False, default='fedavg', help='Filename of the aggregator module to use.') @click.pass_context -def combiner_cmd(ctx, discoverhost, discoverport, token, name, host, port, secure, verify, max_clients, init): +def combiner_cmd(ctx, discoverhost, discoverport, token, name, host, port, fqdn, secure, verify, max_clients, init, aggregator): """ :param ctx: @@ -268,33 +261,12 @@ def combiner_cmd(ctx, discoverhost, discoverport, token, name, host, port, secur :param max_clients: :param init: """ - config = {'discover_host': discoverhost, 'discover_port': discoverport, 'token': token, 'myhost': host, - 'myport': port, 'myname': name, 'secure': secure, 'verify': verify, 'max_clients': max_clients, 'init': init} + config = {'discover_host': discoverhost, 'discover_port': discoverport, 'token': token, 'host': host, + 'port': port, 'fqdn': fqdn, 'name': name, 'secure': secure, 'verify': verify, 'max_clients': max_clients, + 'init': init, 'aggregator': aggregator} if config['init']: - with open(config['init'], 'r') as file: - try: - settings = dict(yaml.safe_load(file)) - except yaml.YAMLError as e: - print('Failed to read config from settings file, exiting.', flush=True) - raise (e) - - # Read/overide settings from config file - if 'controller' in settings: - controller_config = settings['controller'] - for key, val in controller_config.items(): - config[key] = val - - if 'combiner' in settings: - combiner_config = settings['combiner'] - config['myname'] = combiner_config['name'] - config['myhost'] = combiner_config['host'] - if 'fqdn' 
in combiner_config.keys(): - config['fqdn'] = combiner_config['fqdn'] - else: - config['fqdn'] = None - config['myport'] = combiner_config['port'] - config['max_clients'] = combiner_config['max_clients'] + apply_config(config) combiner = Combiner(config) combiner.run() diff --git a/fedn/fedn/aggregators/aggregator.py b/fedn/fedn/aggregators/aggregator.py deleted file mode 100644 index d03a1811f..000000000 --- a/fedn/fedn/aggregators/aggregator.py +++ /dev/null @@ -1,38 +0,0 @@ -from abc import ABC, abstractmethod - - -class AggregatorBase(ABC): - """ Abstract class defining helpers. """ - - @abstractmethod - def __init__(self, id, storage, server, modelservice, control): - """ """ - self.name = "" - self.storage = storage - self.id = id - self.server = server - self.modelservice = modelservice - self.control = control - - @abstractmethod - def on_model_update(self, model_id): - pass - - @abstractmethod - def on_model_validation(self, validation): - pass - - @abstractmethod - def combine_models(self, nr_expected_models=None, nr_required_models=1, helper=None, timeout=180): - pass - - -# def get_aggregator(aggregator_type): -# """ Return an instance of the aggregator class. -# -# :param aggregator_type (str): The aggregator type ('fedavg') -# :return: -# """ -# if helper_type == 'fedavg': -# from fedn.aggregators.fedavg import FedAvgAggregator -# return FedAvgAggregator() diff --git a/fedn/fedn/aggregators/fedavg.py b/fedn/fedn/aggregators/fedavg.py deleted file mode 100644 index 317ddc5eb..000000000 --- a/fedn/fedn/aggregators/fedavg.py +++ /dev/null @@ -1,160 +0,0 @@ -import json -import queue -import time - -import fedn.common.net.grpc.fedn_pb2 as fedn -from fedn.aggregators.aggregator import AggregatorBase - - -class FedAvgAggregator(AggregatorBase): - """ Local SGD / Federated Averaging (FedAvg) aggregator. - - :param id: A reference to id of :class: `fedn.combiner.Combiner` - :type id: str - :param storage: Model repository for :class: `fedn.combiner.Combiner` - :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` - :param server: A handle to the Combiner class :class: `fedn.combiner.Combiner` - :type server: class: `fedn.combiner.Combiner` - :param modelservice: A handle to the model service :class: `fedn.clients.combiner.modelservice.ModelService` - :type modelservice: class: `fedn.clients.combiner.modelservice.ModelService` - :param control: A handle to the :class: `fedn.clients.combiner.roundcontrol.RoundControl` - :type control: class: `fedn.clients.combiner.roundcontrol.RoundControl` - - """ - - def __init__(self, id, storage, server, modelservice, control): - """Constructor method - """ - - super().__init__(id, storage, server, modelservice, control) - - self.name = "FedAvg" - self.validations = {} - self.model_updates = queue.Queue() - - def on_model_update(self, model_id): - """Callback when a new model update is recieved from a client. - Performs (optional) pre-processing and the puts the update id - on the aggregation queue. - - :param model_id: ID of model update - :type model_id: str - """ - try: - self.server.report_status("AGGREGATOR({}): callback received model {}".format(self.name, model_id), - log_level=fedn.Status.INFO) - - # Push the model update to the processing queue - self.model_updates.put(model_id) - except Exception as e: - self.server.report_status("AGGREGATOR({}): Failed to receive candidate model! 
{}".format(self.name, e), - log_level=fedn.Status.WARNING) - pass - - def on_model_validation(self, validation): - """ Callback when a new model validation is recieved from a client. - - :param validation: Dict containing validation data sent by client. - Must be valid JSON. - :type validation: dict - """ - - # Currently, the validations are actually sent as status messages - # directly in the client, so here we are just storing them in the - # combiner memory. This will need to be refactored later so that this - # callback is responsible for reporting the validation to the db. - - model_id = validation.model_id - data = json.loads(validation.data) - try: - self.validations[model_id].append(data) - except KeyError: - self.validations[model_id] = [data] - - self.server.report_status("AGGREGATOR({}): callback processed validation {}".format(self.name, validation.model_id), - log_level=fedn.Status.INFO) - - def combine_models(self, nr_expected_models=None, nr_required_models=1, helper=None, timeout=180): - """Compute a running average of model updates. - - :param nr_expected_models: The number of updates expected in this round, defaults to None - :type nr_expected_models: int, optional - :param nr_required_models: The number of updates needed to a valid round, defaults to 1 - :type nr_required_models: int, optional - :param helper: An instance of :class: `fedn.utils.helpers.HelperBase`, ML framework specific helper, defaults to None - :type helper: class: `fedn.utils.helpers.HelperBase`, optional - :param timeout: Timeout for model updates, defaults to 180 - :type timeout: int, optional - :return: The global model and metadata - :rtype: tuple - """ - - data = {} - data['time_model_load'] = 0.0 - data['time_model_aggregation'] = 0.0 - - self.server.report_status( - "AGGREGATOR({}): Aggregating model updates...".format(self.name)) - - round_time = 0.0 - polling_interval = 1.0 - nr_processed_models = 0 - while nr_processed_models < nr_expected_models: - try: - model_id = self.model_updates.get(block=False) - self.server.report_status( - "AGGREGATOR({}): Received model update with id {}".format(self.name, model_id)) - - # Load the model update from disk - tic = time.time() - model_str = self.control.load_model_fault_tolerant(model_id) - if model_str: - try: - model_next = helper.load_model_from_BytesIO( - model_str.getbuffer()) - except IOError: - self.server.report_status( - "AGGREGATOR({}): Failed to load model!".format(self.name)) - else: - raise - data['time_model_load'] += time.time() - tic - - # Aggregate / reduce - tic = time.time() - if nr_processed_models == 0: - model = model_next - else: - model = helper.increment_average( - model, model_next, nr_processed_models + 1) - data['time_model_aggregation'] += time.time() - tic - - nr_processed_models += 1 - self.model_updates.task_done() - except queue.Empty: - self.server.report_status("AGGREGATOR({}): waiting for model updates: {} of {} completed.".format(self.name, - nr_processed_models, - nr_expected_models)) - time.sleep(polling_interval) - round_time += polling_interval - except Exception as e: - self.server.report_status( - "AGGERGATOR({}): Error encoutered while reading model update, skipping this update. 
{}".format(self.name, e)) - nr_expected_models -= 1 - if nr_expected_models <= 0: - return None, data - self.model_updates.task_done() - - if round_time >= timeout: - self.server.report_status("AGGREGATOR({}): training round timed out.".format( - self.name), log_level=fedn.Status.WARNING) - # TODO: Generalize policy for what to do in case of timeout. - if nr_processed_models >= nr_required_models: - break - else: - return None, data - - data['nr_successful_updates'] = nr_processed_models - - self.server.report_status("AGGREGATOR({}): Training round completed, aggregated {} models.".format(self.name, nr_processed_models), - log_level=fedn.Status.INFO) - return model, data diff --git a/fedn/fedn/clients/combiner/roundcontrol.py b/fedn/fedn/clients/combiner/roundcontrol.py deleted file mode 100644 index 7e19065f7..000000000 --- a/fedn/fedn/clients/combiner/roundcontrol.py +++ /dev/null @@ -1,319 +0,0 @@ -import queue -import random -import sys -import time -import uuid - -from fedn.aggregators.fedavg import FedAvgAggregator -from fedn.utils.helpers import get_helper - - -class RoundControl: - """ Combiner level round controller. - - The controller recieves round configurations from the global controller - and acts on them by soliciting model updates and model validations from - the connected clients. - - :param id: A reference to id of :class: `fedn.combiner.Combiner` - :type id: str - :param storage: Model repository for :class: `fedn.combiner.Combiner` - :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` - :param server: A handle to the Combiner class :class: `fedn.combiner.Combiner` - :type server: class: `fedn.combiner.Combiner` - :param modelservice: A handle to the model service :class: `fedn.clients.combiner.modelservice.ModelService` - :type modelservice: class: `fedn.clients.combiner.modelservice.ModelService` - """ - - def __init__(self, id, storage, server, modelservice): - - self.id = id - self.round_configs = queue.Queue() - self.storage = storage - self.server = server - self.modelservice = modelservice - self.config = {} - - # TODO, make runtime configurable - - self.aggregator = FedAvgAggregator( - self.id, self.storage, self.server, self.modelservice, self) - - def push_round_config(self, round_config): - """ Recieve a round_config (job description) and push on the queue. - - :param round_config: A dict containing round configurations. - :type round_config: dict - :return: A generated job id (universally unique identifier) for the round configuration - :rtype: str - """ - try: - - round_config['_job_id'] = str(uuid.uuid4()) - self.round_configs.put(round_config) - except Exception: - self.server.report_status( - "ROUNDCONTROL: Failed to push round config.", flush=True) - raise - return round_config['_job_id'] - - def load_model_fault_tolerant(self, model_id, retry=3): - """Load model update object. 
- - :param model_id: The ID of the model - :type model_id: str - :param retry: number of times retrying load model update, defaults to 3 - :type retry: int, optional - :return: Updated model - :rtype: class: `io.BytesIO` - """ - # Try reading model update from local disk/combiner memory - model_str = self.modelservice.models.get(model_id) - # And if we cannot access that, try downloading from the server - if model_str is None: - model_str = self.modelservice.get_model(model_id) - # TODO: use retrying library - tries = 0 - while tries < retry: - tries += 1 - if not model_str or sys.getsizeof(model_str) == 80: - self.server.report_status( - "ROUNDCONTROL: Model download failed. retrying", flush=True) - - time.sleep(1) - model_str = self.modelservice.get_model(model_id) - - return model_str - - def _training_round(self, config, clients): - """Send model update requests to clients and aggregate results. - - :param config: [description] - :type config: [type] - :param clients: [description] - :type clients: [type] - :return: [description] - :rtype: [type] - """ - - # We flush the queue at a beginning of a round (no stragglers allowed) - # TODO: Support other ways to handle stragglers. - with self.aggregator.model_updates.mutex: - self.aggregator.model_updates.queue.clear() - - self.server.report_status( - "ROUNDCONTROL: Initiating training round, participating members: {}".format(clients)) - self.server.request_model_update(config['model_id'], clients=clients) - - meta = {} - meta['nr_expected_updates'] = len(clients) - meta['nr_required_updates'] = int(config['clients_required']) - meta['timeout'] = float(config['round_timeout']) - tic = time.time() - model = None - data = None - try: - helper = get_helper(config['helper_type']) - model, data = self.aggregator.combine_models(nr_expected_models=len(clients), - nr_required_models=int( - config['clients_required']), - helper=helper, timeout=float(config['round_timeout'])) - except Exception as e: - print("TRAINING ROUND FAILED AT COMBINER! {}".format(e), flush=True) - meta['time_combination'] = time.time() - tic - meta['aggregation_time'] = data - return model, meta - - def _validation_round(self, config, clients, model_id): - """[summary] - - :param config: [description] - :type config: [type] - :param clients: [description] - :type clients: [type] - :param model_id: [description] - :type model_id: [type] - """ - self.server.request_model_validation(model_id, clients=clients) - - def stage_model(self, model_id, timeout_retry=3, retry=2): - """Download model from persistent storage. - - :param model_id: ID of the model update object to stage. - :type model_id: str - :param timeout_retry: Sleep before retrying download again (sec), defaults to 3 - :type timeout_retry: int, optional - :param retry: Number of retries, defaults to 2 - :type retry: int, optional - """ - - # If the model is already in memory at the server we do not need to do anything. - if self.modelservice.models.exist(model_id): - return - - # If it is not there, download it from storage and stage it in memory at the server. 
- tries = 0 - while True: - try: - model = self.storage.get_model_stream(model_id) - if model: - break - except Exception: - self.server.report_status("ROUNDCONTROL: Could not fetch model from storage backend, retrying.", - flush=True) - time.sleep(timeout_retry) - tries += 1 - if tries > retry: - self.server.report_status( - "ROUNDCONTROL: Failed to stage model {} from storage backend!".format(model_id), flush=True) - return - - self.modelservice.set_model(model, model_id) - - def __assign_round_clients(self, n, type="trainers"): - """ Obtain a list of clients (trainers or validators) to talk to in a round. - - :param n: Size of a random set taken from active trainers (clients), if n > "active trainers" all is used - :type n: int - :param type: type of clients, either "trainers" or "validators", defaults to "trainers" - :type type: str, optional - :return: Set of clients - :rtype: list - """ - - if type == "validators": - clients = self.server.get_active_validators() - elif type == "trainers": - clients = self.server.get_active_trainers() - else: - self.server.report_status( - "ROUNDCONTROL(ERROR): {} is not a supported type of client".format(type), flush=True) - raise - - # If the number of requested trainers exceeds the number of available, use all available. - if n > len(clients): - n = len(clients) - - # If not, we pick a random subsample of all available clients. - - clients = random.sample(clients, n) - - return clients - - def __check_nr_round_clients(self, config, timeout=0.0): - """Check that the minimal number of required clients to start a round are connected. - - :param config: [description] - :type config: [type] - :param timeout: [description], defaults to 0.0 - :type timeout: float, optional - :return: [description] - :rtype: [type] - """ - - ready = False - t = 0.0 - while not ready: - active = self.server.nr_active_trainers() - - if active >= int(config['clients_requested']): - return True - else: - self.server.report_status("waiting for {} clients to get started, currently: {}".format( - int(config['clients_requested']) - active, - active), flush=True) - if t >= timeout: - if active >= int(config['clients_required']): - return True - else: - return False - - time.sleep(1.0) - t += 1.0 - - return ready - - def execute_validation(self, round_config): - """ Coordinate validation rounds as specified in config. - - :param round_config: [description] - :type round_config: [type] - """ - model_id = round_config['model_id'] - self.server.report_status( - "COMBINER orchestrating validation of model {}".format(model_id)) - self.stage_model(model_id) - validators = self.__assign_round_clients( - self.server.max_clients, type="validators") - self._validation_round(round_config, validators, model_id) - - def execute_training(self, config): - """ Coordinates clients to execute training and validation tasks. 
""" - - round_meta = {} - round_meta['config'] = config - round_meta['round_id'] = config['round_id'] - - self.stage_model(config['model_id']) - - # Execute the configured number of rounds - round_meta['local_round'] = {} - for r in range(1, int(config['rounds']) + 1): - self.server.report_status( - "ROUNDCONTROL: Starting training round {}".format(r), flush=True) - clients = self.__assign_round_clients(self.server.max_clients) - model, meta = self._training_round(config, clients) - round_meta['local_round'][str(r)] = meta - if model is None: - self.server.report_status( - "\t Failed to update global model in round {0}!".format(r)) - - if model is not None: - helper = get_helper(config['helper_type']) - a = helper.serialize_model_to_BytesIO(model) - # Send aggregated model to server - model_id = str(uuid.uuid4()) - self.modelservice.set_model(a, model_id) - a.close() - - # Update Combiner latest model - self.server.set_active_model(model_id) - - print("------------------------------------------") - self.server.report_status( - "ROUNDCONTROL: TRAINING ROUND COMPLETED.", flush=True) - print("\n") - return round_meta - - def run(self): - """ Main control loop. Sequentially execute rounds based on round config. - - """ - try: - while True: - try: - round_config = self.round_configs.get(block=False) - - ready = self.__check_nr_round_clients(round_config) - if ready: - if round_config['task'] == 'training': - tic = time.time() - round_meta = self.execute_training(round_config) - round_meta['time_exec_training'] = time.time() - \ - tic - round_meta['name'] = self.id - self.server.tracer.set_round_meta(round_meta) - elif round_config['task'] == 'validation': - self.execute_validation(round_config) - else: - self.server.report_status( - "ROUNDCONTROL: Round config contains unkown task type.", flush=True) - else: - self.server.report_status( - "ROUNDCONTROL: Failed to meet client allocation requirements for this round config.", flush=True) - - except queue.Empty: - time.sleep(1) - - except (KeyboardInterrupt, SystemExit): - pass diff --git a/fedn/fedn/clients/reducer/control.py b/fedn/fedn/clients/reducer/control.py deleted file mode 100644 index 25de88344..000000000 --- a/fedn/fedn/clients/reducer/control.py +++ /dev/null @@ -1,581 +0,0 @@ -import copy -import os -import time -import uuid -from datetime import datetime - -import fedn.utils.helpers -from fedn.clients.reducer.interfaces import CombinerUnavailableError -from fedn.clients.reducer.network import Network -from fedn.common.storage.s3.s3repo import S3ModelRepository -from fedn.common.tracer.mongotracer import MongoTracer - -from .state import ReducerState - - -class UnsupportedStorageBackend(Exception): - pass - - -class MisconfiguredStorageBackend(Exception): - pass - - -class ReducerControl: - """ Main conroller for training round. 
- - """ - - def __init__(self, statestore): - - self.__state = ReducerState.setup - self.statestore = statestore - if self.statestore.is_inited(): - self.network = Network(self, statestore) - - try: - config = self.statestore.get_storage_backend() - except Exception: - print( - "REDUCER CONTROL: Failed to retrive storage configuration, exiting.", flush=True) - raise MisconfiguredStorageBackend() - if not config: - print( - "REDUCER CONTROL: No storage configuration available, exiting.", flush=True) - raise MisconfiguredStorageBackend() - - if config['storage_type'] == 'S3': - - self.model_repository = S3ModelRepository(config['storage_config']) - else: - print("REDUCER CONTROL: Unsupported storage backend, exiting.", flush=True) - raise UnsupportedStorageBackend() - - self.client_allocation_policy = self.client_allocation_policy_least_packed - - if self.statestore.is_inited(): - self.__state = ReducerState.idle - - def get_helper(self): - """ - - :return: - """ - helper_type = self.statestore.get_framework() - helper = fedn.utils.helpers.get_helper(helper_type) - if not helper: - print("CONTROL: Unsupported helper type {}, please configure compute_context.helper !".format(helper_type), - flush=True) - return None - return helper - - def delete_bucket_objects(self): - """ - - :return: - """ - return self.model_repository.delete_objects() - - def get_state(self): - """ - - :return: - """ - return self.__state - - def idle(self): - """ - - :return: - """ - if self.__state == ReducerState.idle: - return True - else: - return False - - def get_first_model(self): - """ - - :return: - """ - return self.statestore.get_first() - - def get_latest_model(self): - """ - - :return: - """ - return self.statestore.get_latest() - - def get_model_info(self): - """ - - :return: - """ - return self.statestore.get_model_info() - - def get_events(self): - """ - - :return: - """ - return self.statestore.get_events() - - def drop_models(self): - """ - - """ - self.statestore.drop_models() - - def get_compute_context(self): - """ - - :return: - """ - definition = self.statestore.get_compute_context() - if definition: - try: - context = definition['filename'] - return context - except (IndexError, KeyError): - print( - "No context filename set for compute context definition", flush=True) - return None - else: - return None - - def set_compute_context(self, filename, path): - """ Persist the configuration for the compute package. """ - self.model_repository.set_compute_context(filename, path) - self.statestore.set_compute_context(filename) - - def get_compute_package(self, compute_package=''): - """ - - :param compute_package: - :return: - """ - if compute_package == '': - compute_package = self.get_compute_context() - return self.model_repository.get_compute_package(compute_package) - - def commit(self, model_id, model=None): - """ Commit a model to the global model trail. The model commited becomes the lastest consensus model. 
""" - - helper = self.get_helper() - if model is not None: - print("Saving model to disk...", flush=True) - outfile_name = helper.save_model(model) - print("DONE", flush=True) - print("Uploading model to Minio...", flush=True) - model_id = self.model_repository.set_model( - outfile_name, is_file=True) - print("DONE", flush=True) - os.unlink(outfile_name) - - self.statestore.set_latest(model_id) - - def _out_of_sync(self, combiners=None): - - if not combiners: - combiners = self.network.get_combiners() - - osync = [] - for combiner in combiners: - try: - model_id = combiner.get_model_id() - except CombinerUnavailableError: - self._handle_unavailable_combiner(combiner) - model_id = None - - if model_id and (model_id != self.get_latest_model()): - osync.append(combiner) - return osync - - def check_round_participation_policy(self, compute_plan, combiner_state): - """ Evaluate reducer level policy for combiner round-participation. - This is a decision on ReducerControl level, additional checks - applies on combiner level. Not all reducer control flows might - need or want to use a participation policy. """ - - if compute_plan['task'] == 'training': - nr_active_clients = int(combiner_state['nr_active_trainers']) - elif compute_plan['task'] == 'validation': - nr_active_clients = int(combiner_state['nr_active_validators']) - else: - print("Invalid task type!", flush=True) - return False - - if int(compute_plan['clients_required']) <= nr_active_clients: - return True - else: - return False - - def check_round_start_policy(self, combiners): - """ Check if the overall network state meets the policy to start a round. """ - if len(combiners) > 0: - return True - else: - return False - - def check_round_validity_policy(self, combiners): - """ - At the end of the round, before committing a model to the model ledger, - we check if a round validity policy has been met. This can involve - e.g. asserting that a certain number of combiners have reported in an - updated model, or that criteria on model performance have been met. - """ - if combiners == []: - return False - else: - return True - - def _handle_unavailable_combiner(self, combiner): - """ This callback is triggered if a combiner is found to be unresponsive. """ - # TODO: Implement strategy to handle the case. - print("REDUCER CONTROL: Combiner {} unavailable.".format( - combiner.name), flush=True) - - def _select_round_combiners(self, compute_plan): - combiners = [] - for combiner in self.network.get_combiners(): - try: - combiner_state = combiner.report() - except CombinerUnavailableError: - self._handle_unavailable_combiner(combiner) - combiner_state = None - - if combiner_state: - is_participating = self.check_round_participation_policy( - compute_plan, combiner_state) - if is_participating: - combiners.append((combiner, compute_plan)) - return combiners - - def round(self, config, round_number): - """ Execute one global round. """ - - round_meta = {'round_id': round_number} - - if len(self.network.get_combiners()) < 1: - print("REDUCER: No combiners connected!") - return None, round_meta - - # 1. Formulate compute plans for this round and determine which combiners should participate in the round. 
- compute_plan = copy.deepcopy(config) - compute_plan['rounds'] = 1 - compute_plan['round_id'] = round_number - compute_plan['task'] = 'training' - compute_plan['model_id'] = self.get_latest_model() - compute_plan['helper_type'] = self.statestore.get_framework() - - round_meta['compute_plan'] = compute_plan - - combiners = [] - for combiner in self.network.get_combiners(): - - try: - combiner_state = combiner.report() - except CombinerUnavailableError: - self._handle_unavailable_combiner(combiner) - combiner_state = None - - if combiner_state is not None: - is_participating = self.check_round_participation_policy( - compute_plan, combiner_state) - if is_participating: - combiners.append((combiner, compute_plan)) - - round_start = self.check_round_start_policy(combiners) - - print("CONTROL: round start policy met, participating combiners {}".format( - combiners), flush=True) - if not round_start: - print("CONTROL: Round start policy not met, skipping round!", flush=True) - return None - - # 2. Sync up and ask participating combiners to coordinate model updates - # TODO refactor - - statestore_config = self.statestore.get_config() - - self.tracer = MongoTracer( - statestore_config['mongo_config'], statestore_config['network_id']) - - start_time = datetime.now() - - for combiner, compute_plan in combiners: - try: - self.sync_combiners([combiner], self.get_latest_model()) - _ = combiner.start(compute_plan) - except CombinerUnavailableError: - # This is OK, handled by round accept policy - self._handle_unavailable_combiner(combiner) - pass - except Exception: - # Unknown error - raise - - # Wait until participating combiners have a model that is out of sync with the current global model. - # TODO: We do not need to wait until all combiners complete before we start reducing. - cl = [] - for combiner, plan in combiners: - cl.append(combiner) - - wait = 0.0 - while len(self._out_of_sync(cl)) < len(combiners): - time.sleep(1.0) - wait += 1.0 - if wait >= config['round_timeout']: - break - - # TODO refactor - end_time = datetime.now() - round_time = end_time - start_time - self.tracer.set_combiner_time(round_number, round_time.seconds) - - round_meta['time_combiner_update'] = round_time.seconds - - # OBS! Here we are checking against all combiners, not just those that computed in this round. - # This means we let straggling combiners participate in the update - updated = self._out_of_sync() - print("COMBINERS UPDATED MODELS: {}".format(updated), flush=True) - - print("Checking round validity policy...", flush=True) - round_valid = self.check_round_validity_policy(updated) - if not round_valid: - # TODO: Should we reset combiner state here? - print("REDUCER CONTROL: Round invalid!", flush=True) - return None, round_meta - print("Round valid.") - - print("Starting reducing models...", flush=True) - # 3. Reduce combiner models into a global model - try: - model, data = self.reduce(updated) - round_meta['reduce'] = data - except Exception as e: - print("CONTROL: Failed to reduce models from combiners: {}".format( - updated), flush=True) - print(e, flush=True) - return None, round_meta - print("DONE", flush=True) - - # 6. 
Commit the global model to the ledger - print("Committing global model...", flush=True) - if model is not None: - # Commit to model ledger - tic = time.time() - - model_id = uuid.uuid4() - self.commit(model_id, model) - round_meta['time_commit'] = time.time() - tic - else: - print("REDUCER: failed to update model in round with config {}".format( - config), flush=True) - return None, round_meta - print("DONE", flush=True) - - # 4. Trigger participating combiner nodes to execute a validation round for the current model - validate = config['validate'] - if validate: - combiner_config = copy.deepcopy(config) - combiner_config['model_id'] = self.get_latest_model() - combiner_config['task'] = 'validation' - combiner_config['helper_type'] = self.statestore.get_framework() - - validating_combiners = self._select_round_combiners( - combiner_config) - - for combiner, combiner_config in validating_combiners: - try: - self.sync_combiners([combiner], self.get_latest_model()) - combiner.start(combiner_config) - except CombinerUnavailableError: - # OK if validation fails for a combiner - self._handle_unavailable_combiner(combiner) - pass - - # 5. Check commit policy based on validation result (optionally) - # TODO: Implement. - - return model_id, round_meta - - def sync_combiners(self, combiners, model_id): - """ Spread the current consensus model to all active combiner nodes. """ - if not model_id: - print("GOT NO MODEL TO SET! Have you seeded the FedML model?", flush=True) - return - for combiner in combiners: - _ = combiner.set_model_id(model_id) - - def instruct(self, config): - """ Main entrypoint, executes the compute plan. """ - - if self.__state == ReducerState.instructing: - print("Already set in INSTRUCTING state", flush=True) - return - - self.__state = ReducerState.instructing - - if not self.get_latest_model(): - print("No model in model chain, please seed the alliance!") - - self.__state = ReducerState.monitoring - - # TODO: Validate and set the round config object - # self.set_config(config) - - # TODO: Refactor - - statestore_config = self.statestore.get_config() - self.tracer = MongoTracer( - statestore_config['mongo_config'], statestore_config['network_id']) - last_round = self.tracer.get_latest_round() - - for round in range(1, int(config['rounds'] + 1)): - tic = time.time() - if last_round: - current_round = last_round + round - else: - current_round = round - - start_time = datetime.now() - # start round monitor - self.tracer.start_monitor(round) - # todo add try except bloc for round meta - model_id = None - round_meta = {'round_id': current_round} - try: - model_id, round_meta = self.round(config, current_round) - except TypeError: - print("Could not unpack data from round...", flush=True) - - end_time = datetime.now() - - if model_id: - print("REDUCER: Global round completed, new model: {}".format( - model_id), flush=True) - round_time = end_time - start_time - self.tracer.set_latest_time(current_round, round_time.seconds) - round_meta['status'] = 'Success' - else: - print("REDUCER: Global round failed!") - round_meta['status'] = 'Failed' - - # stop round monitor - self.tracer.stop_monitor() - round_meta['time_round'] = time.time() - tic - self.tracer.set_round_meta_reducer(round_meta) - - self.__state = ReducerState.idle - - def reduce(self, combiners): - """ Combine current models at Combiner nodes into one global model. 
""" - - meta = {} - meta['time_fetch_model'] = 0.0 - meta['time_load_model'] = 0.0 - meta['time_aggregate_model'] = 0.0 - - i = 1 - model = None - for combiner in combiners: - - # TODO: Handle inactive RPC error in get_model and raise specific error - try: - tic = time.time() - data = combiner.get_model() - meta['time_fetch_model'] += (time.time() - tic) - except Exception: - pass - - helper = self.get_helper() - - if data is not None: - try: - tic = time.time() - model_str = combiner.get_model().getbuffer() - model_next = helper.load_model_from_BytesIO(model_str) - meta['time_load_model'] += (time.time() - tic) - tic = time.time() - model = helper.increment_average(model, model_next, i) - meta['time_aggregate_model'] += (time.time() - tic) - except Exception: - tic = time.time() - model = helper.load_model_from_BytesIO(data.getbuffer()) - meta['time_aggregate_model'] += (time.time() - tic) - i = i + 1 - - return model, meta - - def monitor(self, config=None): - """ - - :param config: - """ - # status = self.network.check_health() - pass - - def client_allocation_policy_first_available(self): - """ - Allocate client to the first available combiner in the combiner list. - Packs one combiner full before filling up next combiner. - """ - for combiner in self.network.get_combiners(): - if combiner.allowing_clients(): - return combiner - return None - - def client_allocation_policy_least_packed(self): - """ - Allocate client to the available combiner with the smallest number of clients. - Spreads clients evenly over all active combiners. - - TODO: Not thread safe - not garanteed to result in a perfectly even partition. - - """ - min_clients = None - selected_combiner = None - - for combiner in self.network.get_combiners(): - try: - if combiner.allowing_clients(): - combiner_state = combiner.report() - nac = combiner_state['nr_active_clients'] - if not min_clients: - min_clients = nac - selected_combiner = combiner - elif nac < min_clients: - min_clients = nac - selected_combiner = combiner - except CombinerUnavailableError: - print("Combiner was not responding, continuing to next") - - return selected_combiner - - def find(self, name): - """ - - :param name: - :return: - """ - for combiner in self.network.get_combiners(): - if name == combiner.name: - return combiner - return None - - def find_available_combiner(self): - """ - - :return: - """ - combiner = self.client_allocation_policy() - return combiner - - def state(self): - """ - - :return: - """ - return self.__state diff --git a/fedn/fedn/clients/reducer/templates/context.html b/fedn/fedn/clients/reducer/templates/context.html deleted file mode 100644 index 5d814f467..000000000 --- a/fedn/fedn/clients/reducer/templates/context.html +++ /dev/null @@ -1,34 +0,0 @@ -{% extends "index.html" %} - -{% block content %} -
-[... context.html markup: compute package upload form ("Upload and set compute package") ...]
-{% endblock %}
diff --git a/fedn/fedn/clients/reducer/templates/index.html b/fedn/fedn/clients/reducer/templates/index.html deleted file mode 100644 index a096fa037..000000000 --- a/fedn/fedn/clients/reducer/templates/index.html +++ /dev/null @@ -1,371 +0,0 @@ -[... index.html markup: base layout, navigation and message banners for the "FEDn Reducer" dashboard ...]
- - - - - - \ No newline at end of file diff --git a/fedn/fedn/combiner.py b/fedn/fedn/combiner.py deleted file mode 100644 index daaab3454..000000000 --- a/fedn/fedn/combiner.py +++ /dev/null @@ -1,685 +0,0 @@ -import base64 -import queue -import re -import signal -import sys -import threading -import time -import uuid -from datetime import datetime, timedelta -from enum import Enum - -import fedn.common.net.grpc.fedn_pb2 as fedn -import fedn.common.net.grpc.fedn_pb2_grpc as rpc -from fedn.clients.combiner.modelservice import ModelService -from fedn.clients.combiner.roundcontrol import RoundControl -from fedn.common.net.connect import ConnectorCombiner, Status -from fedn.common.net.grpc.server import Server -from fedn.common.storage.s3.s3repo import S3ModelRepository -from fedn.common.tracer.mongotracer import MongoTracer - -VALID_NAME_REGEX = '^[a-zA-Z0-9_-]*$' - - -class Role(Enum): - WORKER = 1 - COMBINER = 2 - REDUCER = 3 - OTHER = 4 - - -def role_to_proto_role(role): - """ - - :param role: - :return: - """ - if role == Role.COMBINER: - return fedn.COMBINER - if role == Role.WORKER: - return fedn.WORKER - if role == Role.REDUCER: - return fedn.REDUCER - if role == Role.OTHER: - return fedn.OTHER - - -#################################################################################################################### -#################################################################################################################### - -class Combiner(rpc.CombinerServicer, rpc.ReducerServicer, rpc.ConnectorServicer, rpc.ControlServicer): - """ Combiner gRPC server. """ - - def __init__(self, connect_config): - - # Holds client queues - self.clients = {} - - self.modelservice = ModelService() - - # Validate combiner name - match = re.search(VALID_NAME_REGEX, connect_config['myname']) - if not match: - raise ValueError('Unallowed character in combiner name. Allowed characters: a-z, A-Z, 0-9, _, -.') - - self.id = connect_config['myname'] - self.role = Role.COMBINER - self.max_clients = connect_config['max_clients'] - - self.model_id = None - - announce_client = ConnectorCombiner(host=connect_config['discover_host'], - port=connect_config['discover_port'], - myhost=connect_config['myhost'], - fqdn=connect_config['fqdn'], - myport=connect_config['myport'], - token=connect_config['token'], - name=connect_config['myname'], - secure=connect_config['secure'], - verify=connect_config['verify']) - - response = None - while True: - status, response = announce_client.announce() - if status == Status.TryAgain: - print(response, flush=True) - time.sleep(5) - continue - if status == Status.Assigned: - config = response - print( - "COMBINER: was announced successfully. 
Waiting for clients and commands!", flush=True) - break - if status == Status.UnAuthorized: - print(response, flush=True) - sys.exit("Exiting: Unauthorized") - - cert = config['certificate'] - key = config['key'] - - if config['certificate']: - cert = base64.b64decode(config['certificate']) # .decode('utf-8') - key = base64.b64decode(config['key']) # .decode('utf-8') - - grpc_config = {'port': connect_config['myport'], - 'secure': connect_config['secure'], - 'certificate': cert, - 'key': key} - - self.repository = S3ModelRepository( - config['storage']['storage_config']) - self.server = Server(self, self.modelservice, grpc_config) - - self.tracer = MongoTracer( - config['statestore']['mongo_config'], config['statestore']['network_id']) - - self.control = RoundControl( - self.id, self.repository, self, self.modelservice) - threading.Thread(target=self.control.run, daemon=True).start() - - self.server.start() - - def __whoami(self, client, instance): - - def role_to_proto_role(role): - """ - - :param role: - :return: - """ - if role == Role.COMBINER: - return fedn.COMBINER - if role == Role.WORKER: - return fedn.WORKER - if role == Role.REDUCER: - return fedn.REDUCER - if role == Role.OTHER: - return fedn.OTHER - - client.name = instance.id - client.role = role_to_proto_role(instance.role) - return client - - def get_active_model(self): - """ - - :return: - """ - return self.model_id - - def set_active_model(self, model_id): - """ - - :param model_id: - """ - self.model_id = model_id - - def report_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None, flush=True): - print("{}:COMBINER({}):{} {}".format(datetime.now().strftime( - '%Y-%m-%d %H:%M:%S'), self.id, log_level, msg), flush=flush) - - def request_model_update(self, model_id, clients=[]): - """ Ask clients to update the current global model. - - Parameters - ---------- - model_id : str - The id of the model to be updated. - clients : list - List of clients to submit a model update request to. - An empty list (default) results in a broadcast to - all connected trainig clients. - - """ - - request = fedn.ModelUpdateRequest() - self.__whoami(request.sender, self) - request.model_id = model_id - request.correlation_id = str(uuid.uuid4()) - request.timestamp = str(datetime.now()) - - if len(clients) == 0: - clients = self.get_active_trainers() - - for client in clients: - request.receiver.name = client.name - request.receiver.role = fedn.WORKER - self.SendModelUpdateRequest(request, self) - - print("COMBINER: Sent model update request for model {} to clients {}".format( - model_id, clients), flush=True) - - def request_model_validation(self, model_id, clients=[]): - """ Ask clients to validate the current global model. - - Parameters - ---------- - model_id : str - The id of the model to be updated. - clients : list - List of clients to submit a model update request to. - An empty list (default) results in a broadcast to - all connected trainig clients. 
- - """ - - request = fedn.ModelValidationRequest() - self.__whoami(request.sender, self) - request.model_id = model_id - request.correlation_id = str(uuid.uuid4()) - request.timestamp = str(datetime.now()) - - if len(clients) == 0: - clients = self.get_active_validators() - - for client in clients: - request.receiver.name = client.name - request.receiver.role = fedn.WORKER - self.SendModelValidationRequest(request, self) - - print("COMBINER: Sent validation request for model {} to clients {}".format( - model_id, clients), flush=True) - - def _list_clients(self, channel): - request = fedn.ListClientsRequest() - self.__whoami(request.sender, self) - request.channel = channel - clients = self.ListActiveClients(request, self) - return clients.client - - def get_active_trainers(self): - """ - - :return: - """ - trainers = self._list_clients(fedn.Channel.MODEL_UPDATE_REQUESTS) - return trainers - - def get_active_validators(self): - """ - - :return: - """ - validators = self._list_clients(fedn.Channel.MODEL_VALIDATION_REQUESTS) - return validators - - def nr_active_trainers(self): - """ - - :return: - """ - return len(self.get_active_trainers()) - - def nr_active_validators(self): - """ - - :return: - """ - return len(self.get_active_validators()) - - #################################################################################################################### - - def __join_client(self, client): - """ Add a client to the combiner. """ - if client.name not in self.clients.keys(): - self.clients[client.name] = {"lastseen": datetime.now()} - - def _subscribe_client_to_queue(self, client, queue_name): - self.__join_client(client) - if queue_name not in self.clients[client.name].keys(): - self.clients[client.name][queue_name] = queue.Queue() - - def __get_queue(self, client, queue_name): - try: - return self.clients[client.name][queue_name] - except KeyError: - raise - - def __get_status_queue(self, client): - return self.__get_queue(client, fedn.Channel.STATUS) - - def _send_request(self, request, queue_name): - self.__route_request_to_client(request, request.receiver, queue_name) - - def _broadcast_request(self, request, queue_name): - """ Publish a request to all subscribed members. """ - active_clients = self._list_active_clients() - for client in active_clients: - self.clients[client.name][queue_name].put(request) - - def __route_request_to_client(self, request, client, queue_name): - try: - q = self.__get_queue(client, queue_name) - q.put(request) - except Exception: - print("Failed to route request to client: {} {}", - request.receiver, queue_name) - raise - - def _send_status(self, status): - - self.tracer.report(status) - for name, client in self.clients.items(): - try: - q = client[fedn.Channel.STATUS] - status.timestamp = str(datetime.now()) - q.put(status) - except KeyError: - pass - - def __register_heartbeat(self, client): - """ Register a client if first time connecting. Update heartbeat timestamp. """ - self.__join_client(client) - self.clients[client.name]["lastseen"] = datetime.now() - - ##################################################################################################################### - - # Control Service - - def Start(self, control: fedn.ControlRequest, context): - """ Push a round config to RoundControl. 
- - :param control: - :param context: - :return: - """ - response = fedn.ControlResponse() - print("\n\n GOT CONTROL **START** from Command {}\n\n".format(control.command), flush=True) - - config = {} - for parameter in control.parameter: - config.update({parameter.key: parameter.value}) - print("\n\nSTARTING ROUND AT COMBINER WITH ROUND CONFIG: {}\n\n".format( - config), flush=True) - - self.control.push_round_config(config) - return response - - def Configure(self, control: fedn.ControlRequest, context): - """ - - :param control: - :param context: - :return: - """ - response = fedn.ControlResponse() - for parameter in control.parameter: - setattr(self, parameter.key, parameter.value) - return response - - def Stop(self, control: fedn.ControlRequest, context): - """ - - :param control: - :param context: - :return: - """ - response = fedn.ControlResponse() - print("\n\n\n\n\n GOT CONTROL **STOP** from Command\n\n\n\n\n", flush=True) - return response - - def Report(self, control: fedn.ControlRequest, context): - """ Descibe current state of the Combiner. """ - - response = fedn.ControlResponse() - print("\n\n\n\n\n GOT CONTROL **REPORT** from Command\n\n\n\n\n", flush=True) - - active_trainers = self.get_active_trainers() - p = response.parameter.add() - p.key = "nr_active_trainers" - p.value = str(len(active_trainers)) - - active_validators = self.get_active_validators() - p = response.parameter.add() - p.key = "nr_active_validators" - p.value = str(len(active_validators)) - - active_trainers_ = self.get_active_trainers() - active_trainers = [] - for client in active_trainers_: - active_trainers.append(client) - p = response.parameter.add() - p.key = "active_trainers" - p.value = str(active_trainers) - - active_validators_ = self.get_active_validators() - active_validators = [] - for client in active_validators_: - active_validators.append(client) - p = response.parameter.add() - p.key = "active_validators" - p.value = str(active_validators) - - p = response.parameter.add() - p.key = "nr_active_clients" - p.value = str(len(active_trainers)+len(active_validators)) - - p = response.parameter.add() - p.key = "model_id" - model_id = self.get_active_model() - if model_id is None: - model_id = "" - p.value = str(model_id) - - p = response.parameter.add() - p.key = "nr_unprocessed_compute_plans" - p.value = str(self.control.round_configs.qsize()) - - p = response.parameter.add() - p.key = "name" - p.value = str(self.id) - - return response - - ##################################################################################################################### - - def AllianceStatusStream(self, response, context): - """ A server stream RPC endpoint that emits status messages. """ - status = fedn.Status( - status="Client {} connecting to AllianceStatusStream.".format(response.sender)) - status.log_level = fedn.Status.INFO - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - self._subscribe_client_to_queue(response.sender, fedn.Channel.STATUS) - q = self.__get_queue(response.sender, fedn.Channel.STATUS) - self._send_status(status) - - while True: - yield q.get() - - def SendStatus(self, status: fedn.Status, context): - """ - - :param status: - :param context: - :return: - """ - # Add the status message to all subscribers of the status channel - self._send_status(status) - - response = fedn.Response() - response.response = "Status received." 
- return response - - def _list_subscribed_clients(self, queue_name): - subscribed_clients = [] - for name, client in self.clients.items(): - if queue_name in client.keys(): - subscribed_clients.append(name) - return subscribed_clients - - def _list_active_clients(self, channel): - active_clients = [] - for client in self._list_subscribed_clients(channel): - # This can break with different timezones. - now = datetime.now() - then = self.clients[client]["lastseen"] - # TODO: move the heartbeat timeout to config. - if (now - then) < timedelta(seconds=10): - active_clients.append(client) - return active_clients - - def _drop_inactive_clients(self): - """ Clean up clients that has missed heartbeat """ - - def ListActiveClients(self, request: fedn.ListClientsRequest, context): - """ RPC endpoint that returns a ClientList containing the names of all active clients. - An active client has sent a status message / responded to a heartbeat - request in the last 10 seconds. - """ - clients = fedn.ClientList() - active_clients = self._list_active_clients(request.channel) - - for client in active_clients: - clients.client.append(fedn.Client(name=client, role=fedn.WORKER)) - return clients - - def AcceptingClients(self, request: fedn.ConnectionRequest, context): - """ - - :param request: - :param context: - :return: - """ - response = fedn.ConnectionResponse() - active_clients = self._list_active_clients( - fedn.Channel.MODEL_UPDATE_REQUESTS) - - try: - requested = int(self.max_clients) - if len(active_clients) >= requested: - response.status = fedn.ConnectionStatus.NOT_ACCEPTING - return response - if len(active_clients) < requested: - response.status = fedn.ConnectionStatus.ACCEPTING - return response - - except Exception as e: - print("Combiner not properly configured! {}".format(e), flush=True) - raise - - response.status = fedn.ConnectionStatus.TRY_AGAIN_LATER - return response - - def SendHeartbeat(self, heartbeat: fedn.Heartbeat, context): - """ RPC that lets clients send a hearbeat, notifying the server that - the client is available. """ - self.__register_heartbeat(heartbeat.sender) - response = fedn.Response() - response.sender.name = heartbeat.sender.name - response.sender.role = heartbeat.sender.role - response.response = "Heartbeat received" - return response - - # Combiner Service - - def ModelUpdateStream(self, update, context): - """ - - :param update: - :param context: - """ - client = update.sender - status = fedn.Status( - status="Client {} connecting to ModelUpdateStream.".format(client.name)) - status.log_level = fedn.Status.INFO - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - - self._subscribe_client_to_queue(client, fedn.Channel.MODEL_UPDATES) - q = self.__get_queue(client, fedn.Channel.MODEL_UPDATES) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def ModelUpdateRequestStream(self, response, context): - """ A server stream RPC endpoint. Messages from client stream. 
""" - - client = response.sender - metadata = context.invocation_metadata() - if metadata: - print("\n\n\nGOT METADATA: {}\n\n\n".format(metadata), flush=True) - - status = fedn.Status( - status="Client {} connecting to ModelUpdateRequestStream.".format(client.name)) - status.log_level = fedn.Status.INFO - status.timestamp = str(datetime.now()) - - self.__whoami(status.sender, self) - - self._subscribe_client_to_queue( - client, fedn.Channel.MODEL_UPDATE_REQUESTS) - q = self.__get_queue(client, fedn.Channel.MODEL_UPDATE_REQUESTS) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def ModelValidationStream(self, update, context): - """ - - :param update: - :param context: - """ - client = update.sender - status = fedn.Status( - status="Client {} connecting to ModelValidationStream.".format(client.name)) - status.log_level = fedn.Status.INFO - - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - - self._subscribe_client_to_queue(client, fedn.Channel.MODEL_VALIDATIONS) - q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATIONS) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def ModelValidationRequestStream(self, response, context): - """ A server stream RPC endpoint. Messages from client stream. """ - - client = response.sender - status = fedn.Status( - status="Client {} connecting to ModelValidationRequestStream.".format(client.name)) - status.log_level = fedn.Status.INFO - status.sender.name = self.id - status.sender.role = role_to_proto_role(self.role) - status.timestamp = str(datetime.now()) - - self._subscribe_client_to_queue( - client, fedn.Channel.MODEL_VALIDATION_REQUESTS) - q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATION_REQUESTS) - - self._send_status(status) - - while context.is_active(): - try: - yield q.get(timeout=1.0) - except queue.Empty: - pass - - def SendModelUpdateRequest(self, request, context): - """ Send a model update request. """ - self._send_request(request, fedn.Channel.MODEL_UPDATE_REQUESTS) - - response = fedn.Response() - response.response = "CONTROLLER RECEIVED ModelUpdateRequest from client {}".format( - request.sender.name) - return response # TODO Fill later - - def SendModelUpdate(self, request, context): - """ Send a model update response. """ - self.control.aggregator.on_model_update(request.model_update_id) - print("ORCHESTRATOR: Received model update", flush=True) - - response = fedn.Response() - response.response = "RECEIVED ModelUpdate {} from client {}".format( - response, response.sender.name) - return response # TODO Fill later - - def SendModelValidationRequest(self, request, context): - """ Send a model update request. """ - self._send_request(request, fedn.Channel.MODEL_VALIDATION_REQUESTS) - - response = fedn.Response() - response.response = "CONTROLLER RECEIVED ModelValidationRequest from client {}".format( - request.sender.name) - return response # TODO Fill later - - def SendModelValidation(self, request, context): - """ Send a model update response. 
""" - self.control.aggregator.on_model_validation(request) - print("ORCHESTRATOR received validation ", flush=True) - response = fedn.Response() - response.response = "RECEIVED ModelValidation {} from client {}".format( - response, response.sender.name) - return response # TODO Fill later - - # Reducer Service - - def GetGlobalModel(self, request, context): - """ - - :param request: - :param context: - :return: - """ - response = fedn.GetGlobalModelResponse() - self.__whoami(response.sender, self) - response.receiver.name = "reducer" - response.receiver.role = role_to_proto_role(Role.REDUCER) - if not self.get_active_model(): - response.model_id = '' - else: - response.model_id = self.get_active_model() - return response - - #################################################################################################################### - - def run(self): - """ - - """ - - print("COMBINER: {} started, ready for requests. ".format( - self.id), flush=True) - try: - while True: - signal.pause() - except (KeyboardInterrupt, SystemExit): - pass - self.server.stop() diff --git a/fedn/fedn/common/net/grpc/fedn.proto b/fedn/fedn/common/net/grpc/fedn.proto index 0e595b451..ff0ee293c 100644 --- a/fedn/fedn/common/net/grpc/fedn.proto +++ b/fedn/fedn/common/net/grpc/fedn.proto @@ -4,7 +4,6 @@ package grpc; message Response { Client sender = 1; - //string client = 1; string response = 2; } @@ -14,11 +13,11 @@ enum StatusType { MODEL_UPDATE = 2; MODEL_VALIDATION_REQUEST = 3; MODEL_VALIDATION = 4; + INFERENCE = 5; } message Status { Client sender = 1; - //string client = 1; string status = 2; enum LogLevel { @@ -54,6 +53,7 @@ message ModelUpdateRequest { string data = 4; string correlation_id = 5; string timestamp = 6; + string meta = 7; } message ModelUpdate { @@ -73,6 +73,8 @@ message ModelValidationRequest { string data = 4; string correlation_id = 5; string timestamp = 6; + string meta = 7; + bool is_inference = 8; } message ModelValidation { @@ -91,6 +93,7 @@ enum ModelStatus { IN_PROGRESS_OK = 2; FAILED = 3; } + message ModelRequest { Client sender = 1; Client receiver = 2; @@ -200,7 +203,8 @@ message ReportResponse { service Control { rpc Start(ControlRequest) returns (ControlResponse); rpc Stop(ControlRequest) returns (ControlResponse); - rpc Configure(ControlRequest) returns (ReportResponse); + rpc Configure(ControlRequest) returns (ReportResponse); + rpc FlushAggregationQueue(ControlRequest) returns (ControlResponse); rpc Report(ControlRequest) returns (ReportResponse); } diff --git a/fedn/fedn/common/net/grpc/fedn_pb2.py b/fedn/fedn/common/net/grpc/fedn_pb2.py index f53fd40e6..fa4fbb16d 100644 --- a/fedn/fedn/common/net/grpc/fedn_pb2.py +++ b/fedn/fedn/common/net/grpc/fedn_pb2.py @@ -2,266 +2,39 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: fedn/common/net/grpc/fedn.proto """Generated protocol buffer code.""" +from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import enum_type_wrapper - # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor.FileDescriptor( - name='fedn/common/net/grpc/fedn.proto', - package='grpc', - syntax='proto3', - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x1f\x66\x65\x64n/common/net/grpc/fedn.proto\x12\x04grpc\":\n\x08Response\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08response\x18\x02 \x01(\t\"\x8c\x02\n\x06Status\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06status\x18\x02 \x01(\t\x12(\n\tlog_level\x18\x03 \x01(\x0e\x32\x15.grpc.Status.LogLevel\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x1e\n\x04type\x18\x07 \x01(\x0e\x32\x10.grpc.StatusType\x12\r\n\x05\x65xtra\x18\x08 \x01(\t\"B\n\x08LogLevel\x12\x08\n\x04INFO\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\t\n\x05\x41UDIT\x10\x04\"\x9d\x01\n\x12ModelUpdateRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\"\xaf\x01\n\x0bModelUpdate\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x17\n\x0fmodel_update_id\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\xa1\x01\n\x16ModelValidationRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\"\xa8\x01\n\x0fModelValidation\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\x89\x01\n\x0cModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\n\n\x02id\x18\x04 \x01(\t\x12!\n\x06status\x18\x05 \x01(\x0e\x32\x11.grpc.ModelStatus\"]\n\rModelResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\n\n\x02id\x18\x02 \x01(\t\x12!\n\x06status\x18\x03 \x01(\x0e\x32\x11.grpc.ModelStatus\x12\x0f\n\x07message\x18\x04 \x01(\t\"U\n\x15GetGlobalModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\"h\n\x16GetGlobalModelResponse\x12\x1c\n\x06sender\x18\x01 
\x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\")\n\tHeartbeat\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\"W\n\x16\x43lientAvailableMessage\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\t\"R\n\x12ListClientsRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x07\x63hannel\x18\x02 \x01(\x0e\x32\r.grpc.Channel\"*\n\nClientList\x12\x1c\n\x06\x63lient\x18\x01 \x03(\x0b\x32\x0c.grpc.Client\"0\n\x06\x43lient\x12\x18\n\x04role\x18\x01 \x01(\x0e\x32\n.grpc.Role\x12\x0c\n\x04name\x18\x02 \x01(\t\"m\n\x0fReassignRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06server\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 \x01(\r\"c\n\x10ReconnectRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x11\n\treconnect\x18\x03 \x01(\r\"\'\n\tParameter\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"T\n\x0e\x43ontrolRequest\x12\x1e\n\x07\x63ommand\x18\x01 \x01(\x0e\x32\r.grpc.Command\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"F\n\x0f\x43ontrolResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"R\n\x0eReportResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"\x13\n\x11\x43onnectionRequest\"<\n\x12\x43onnectionResponse\x12&\n\x06status\x18\x01 \x01(\x0e\x32\x16.grpc.ConnectionStatus*u\n\nStatusType\x12\x07\n\x03LOG\x10\x00\x12\x18\n\x14MODEL_UPDATE_REQUEST\x10\x01\x12\x10\n\x0cMODEL_UPDATE\x10\x02\x12\x1c\n\x18MODEL_VALIDATION_REQUEST\x10\x03\x12\x14\n\x10MODEL_VALIDATION\x10\x04*\x86\x01\n\x07\x43hannel\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x19\n\x15MODEL_UPDATE_REQUESTS\x10\x01\x12\x11\n\rMODEL_UPDATES\x10\x02\x12\x1d\n\x19MODEL_VALIDATION_REQUESTS\x10\x03\x12\x15\n\x11MODEL_VALIDATIONS\x10\x04\x12\n\n\x06STATUS\x10\x05*F\n\x0bModelStatus\x12\x06\n\x02OK\x10\x00\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x12\n\x0eIN_PROGRESS_OK\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03*8\n\x04Role\x12\n\n\x06WORKER\x10\x00\x12\x0c\n\x08\x43OMBINER\x10\x01\x12\x0b\n\x07REDUCER\x10\x02\x12\t\n\x05OTHER\x10\x03*J\n\x07\x43ommand\x12\x08\n\x04IDLE\x10\x00\x12\t\n\x05START\x10\x01\x12\t\n\x05PAUSE\x10\x02\x12\x08\n\x04STOP\x10\x03\x12\t\n\x05RESET\x10\x04\x12\n\n\x06REPORT\x10\x05*I\n\x10\x43onnectionStatus\x12\x11\n\rNOT_ACCEPTING\x10\x00\x12\r\n\tACCEPTING\x10\x01\x12\x13\n\x0fTRY_AGAIN_LATER\x10\x02\x32z\n\x0cModelService\x12\x33\n\x06Upload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse(\x01\x12\x35\n\x08\x44ownload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse0\x01\x32\xe3\x01\n\x07\x43ontrol\x12\x34\n\x05Start\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x33\n\x04Stop\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x37\n\tConfigure\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse\x12\x34\n\x06Report\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse2V\n\x07Reducer\x12K\n\x0eGetGlobalModel\x12\x1b.grpc.GetGlobalModelRequest\x1a\x1c.grpc.GetGlobalModelResponse2\xab\x03\n\tConnector\x12\x44\n\x14\x41llianceStatusStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x0c.grpc.Status0\x01\x12*\n\nSendStatus\x12\x0c.grpc.Status\x1a\x0e.grpc.Response\x12?\n\x11ListA
ctiveClients\x12\x18.grpc.ListClientsRequest\x1a\x10.grpc.ClientList\x12\x45\n\x10\x41\x63\x63\x65ptingClients\x12\x17.grpc.ConnectionRequest\x1a\x18.grpc.ConnectionResponse\x12\x30\n\rSendHeartbeat\x12\x0f.grpc.Heartbeat\x1a\x0e.grpc.Response\x12\x37\n\x0eReassignClient\x12\x15.grpc.ReassignRequest\x1a\x0e.grpc.Response\x12\x39\n\x0fReconnectClient\x12\x16.grpc.ReconnectRequest\x1a\x0e.grpc.Response2\xda\x04\n\x08\x43ombiner\x12T\n\x18ModelUpdateRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x18.grpc.ModelUpdateRequest0\x01\x12\x46\n\x11ModelUpdateStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x11.grpc.ModelUpdate0\x01\x12\\\n\x1cModelValidationRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x1c.grpc.ModelValidationRequest0\x01\x12N\n\x15ModelValidationStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x15.grpc.ModelValidation0\x01\x12\x42\n\x16SendModelUpdateRequest\x12\x18.grpc.ModelUpdateRequest\x1a\x0e.grpc.Response\x12\x34\n\x0fSendModelUpdate\x12\x11.grpc.ModelUpdate\x1a\x0e.grpc.Response\x12J\n\x1aSendModelValidationRequest\x12\x1c.grpc.ModelValidationRequest\x1a\x0e.grpc.Response\x12<\n\x13SendModelValidation\x12\x15.grpc.ModelValidation\x1a\x0e.grpc.Responseb\x06proto3' -) -_STATUSTYPE = _descriptor.EnumDescriptor( - name='StatusType', - full_name='grpc.StatusType', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='LOG', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATE_REQUEST', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATE', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATION_REQUEST', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATION', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2361, - serialized_end=2478, -) -_sym_db.RegisterEnumDescriptor(_STATUSTYPE) -StatusType = enum_type_wrapper.EnumTypeWrapper(_STATUSTYPE) -_CHANNEL = _descriptor.EnumDescriptor( - name='Channel', - full_name='grpc.Channel', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='DEFAULT', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATE_REQUESTS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_UPDATES', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATION_REQUESTS', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='MODEL_VALIDATIONS', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STATUS', index=5, number=5, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2481, - serialized_end=2615, -) -_sym_db.RegisterEnumDescriptor(_CHANNEL) -Channel = enum_type_wrapper.EnumTypeWrapper(_CHANNEL) -_MODELSTATUS = _descriptor.EnumDescriptor( - name='ModelStatus', - full_name='grpc.ModelStatus', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='OK', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='IN_PROGRESS', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='IN_PROGRESS_OK', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='FAILED', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2617, - serialized_end=2687, -) -_sym_db.RegisterEnumDescriptor(_MODELSTATUS) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1f\x66\x65\x64n/common/net/grpc/fedn.proto\x12\x04grpc\":\n\x08Response\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08response\x18\x02 \x01(\t\"\x8c\x02\n\x06Status\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06status\x18\x02 \x01(\t\x12(\n\tlog_level\x18\x03 \x01(\x0e\x32\x15.grpc.Status.LogLevel\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x1e\n\x04type\x18\x07 \x01(\x0e\x32\x10.grpc.StatusType\x12\r\n\x05\x65xtra\x18\x08 \x01(\t\"B\n\x08LogLevel\x12\x08\n\x04INFO\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\t\n\x05\x41UDIT\x10\x04\"\xab\x01\n\x12ModelUpdateRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\xaf\x01\n\x0bModelUpdate\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x17\n\x0fmodel_update_id\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\"\xc5\x01\n\x16ModelValidationRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 \x01(\t\x12\x14\n\x0cis_inference\x18\x08 \x01(\x08\"\xa8\x01\n\x0fModelValidation\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x04 \x01(\t\x12\x16\n\x0e\x63orrelation_id\x18\x05 \x01(\t\x12\x11\n\ttimestamp\x18\x06 \x01(\t\x12\x0c\n\x04meta\x18\x07 
\x01(\t\"\x89\x01\n\x0cModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12\n\n\x02id\x18\x04 \x01(\t\x12!\n\x06status\x18\x05 \x01(\x0e\x32\x11.grpc.ModelStatus\"]\n\rModelResponse\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\n\n\x02id\x18\x02 \x01(\t\x12!\n\x06status\x18\x03 \x01(\x0e\x32\x11.grpc.ModelStatus\x12\x0f\n\x07message\x18\x04 \x01(\t\"U\n\x15GetGlobalModelRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\"h\n\x16GetGlobalModelResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x10\n\x08model_id\x18\x03 \x01(\t\")\n\tHeartbeat\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\"W\n\x16\x43lientAvailableMessage\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\t\x12\x11\n\ttimestamp\x18\x03 \x01(\t\"R\n\x12ListClientsRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x07\x63hannel\x18\x02 \x01(\x0e\x32\r.grpc.Channel\"*\n\nClientList\x12\x1c\n\x06\x63lient\x18\x01 \x03(\x0b\x32\x0c.grpc.Client\"0\n\x06\x43lient\x12\x18\n\x04role\x18\x01 \x01(\x0e\x32\n.grpc.Role\x12\x0c\n\x04name\x18\x02 \x01(\t\"m\n\x0fReassignRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x0e\n\x06server\x18\x03 \x01(\t\x12\x0c\n\x04port\x18\x04 \x01(\r\"c\n\x10ReconnectRequest\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\x1e\n\x08receiver\x18\x02 \x01(\x0b\x32\x0c.grpc.Client\x12\x11\n\treconnect\x18\x03 \x01(\r\"\'\n\tParameter\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"T\n\x0e\x43ontrolRequest\x12\x1e\n\x07\x63ommand\x18\x01 \x01(\x0e\x32\r.grpc.Command\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"F\n\x0f\x43ontrolResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"R\n\x0eReportResponse\x12\x1c\n\x06sender\x18\x01 \x01(\x0b\x32\x0c.grpc.Client\x12\"\n\tparameter\x18\x02 \x03(\x0b\x32\x0f.grpc.Parameter\"\x13\n\x11\x43onnectionRequest\"<\n\x12\x43onnectionResponse\x12&\n\x06status\x18\x01 
\x01(\x0e\x32\x16.grpc.ConnectionStatus*\x84\x01\n\nStatusType\x12\x07\n\x03LOG\x10\x00\x12\x18\n\x14MODEL_UPDATE_REQUEST\x10\x01\x12\x10\n\x0cMODEL_UPDATE\x10\x02\x12\x1c\n\x18MODEL_VALIDATION_REQUEST\x10\x03\x12\x14\n\x10MODEL_VALIDATION\x10\x04\x12\r\n\tINFERENCE\x10\x05*\x86\x01\n\x07\x43hannel\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x19\n\x15MODEL_UPDATE_REQUESTS\x10\x01\x12\x11\n\rMODEL_UPDATES\x10\x02\x12\x1d\n\x19MODEL_VALIDATION_REQUESTS\x10\x03\x12\x15\n\x11MODEL_VALIDATIONS\x10\x04\x12\n\n\x06STATUS\x10\x05*F\n\x0bModelStatus\x12\x06\n\x02OK\x10\x00\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x12\n\x0eIN_PROGRESS_OK\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03*8\n\x04Role\x12\n\n\x06WORKER\x10\x00\x12\x0c\n\x08\x43OMBINER\x10\x01\x12\x0b\n\x07REDUCER\x10\x02\x12\t\n\x05OTHER\x10\x03*J\n\x07\x43ommand\x12\x08\n\x04IDLE\x10\x00\x12\t\n\x05START\x10\x01\x12\t\n\x05PAUSE\x10\x02\x12\x08\n\x04STOP\x10\x03\x12\t\n\x05RESET\x10\x04\x12\n\n\x06REPORT\x10\x05*I\n\x10\x43onnectionStatus\x12\x11\n\rNOT_ACCEPTING\x10\x00\x12\r\n\tACCEPTING\x10\x01\x12\x13\n\x0fTRY_AGAIN_LATER\x10\x02\x32z\n\x0cModelService\x12\x33\n\x06Upload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse(\x01\x12\x35\n\x08\x44ownload\x12\x12.grpc.ModelRequest\x1a\x13.grpc.ModelResponse0\x01\x32\xa9\x02\n\x07\x43ontrol\x12\x34\n\x05Start\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x33\n\x04Stop\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x37\n\tConfigure\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse\x12\x44\n\x15\x46lushAggregationQueue\x12\x14.grpc.ControlRequest\x1a\x15.grpc.ControlResponse\x12\x34\n\x06Report\x12\x14.grpc.ControlRequest\x1a\x14.grpc.ReportResponse2V\n\x07Reducer\x12K\n\x0eGetGlobalModel\x12\x1b.grpc.GetGlobalModelRequest\x1a\x1c.grpc.GetGlobalModelResponse2\xab\x03\n\tConnector\x12\x44\n\x14\x41llianceStatusStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x0c.grpc.Status0\x01\x12*\n\nSendStatus\x12\x0c.grpc.Status\x1a\x0e.grpc.Response\x12?\n\x11ListActiveClients\x12\x18.grpc.ListClientsRequest\x1a\x10.grpc.ClientList\x12\x45\n\x10\x41\x63\x63\x65ptingClients\x12\x17.grpc.ConnectionRequest\x1a\x18.grpc.ConnectionResponse\x12\x30\n\rSendHeartbeat\x12\x0f.grpc.Heartbeat\x1a\x0e.grpc.Response\x12\x37\n\x0eReassignClient\x12\x15.grpc.ReassignRequest\x1a\x0e.grpc.Response\x12\x39\n\x0fReconnectClient\x12\x16.grpc.ReconnectRequest\x1a\x0e.grpc.Response2\xda\x04\n\x08\x43ombiner\x12T\n\x18ModelUpdateRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x18.grpc.ModelUpdateRequest0\x01\x12\x46\n\x11ModelUpdateStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x11.grpc.ModelUpdate0\x01\x12\\\n\x1cModelValidationRequestStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x1c.grpc.ModelValidationRequest0\x01\x12N\n\x15ModelValidationStream\x12\x1c.grpc.ClientAvailableMessage\x1a\x15.grpc.ModelValidation0\x01\x12\x42\n\x16SendModelUpdateRequest\x12\x18.grpc.ModelUpdateRequest\x1a\x0e.grpc.Response\x12\x34\n\x0fSendModelUpdate\x12\x11.grpc.ModelUpdate\x1a\x0e.grpc.Response\x12J\n\x1aSendModelValidationRequest\x12\x1c.grpc.ModelValidationRequest\x1a\x0e.grpc.Response\x12<\n\x13SendModelValidation\x12\x15.grpc.ModelValidation\x1a\x0e.grpc.Responseb\x06proto3') +_STATUSTYPE = DESCRIPTOR.enum_types_by_name['StatusType'] +StatusType = enum_type_wrapper.EnumTypeWrapper(_STATUSTYPE) +_CHANNEL = DESCRIPTOR.enum_types_by_name['Channel'] +Channel = enum_type_wrapper.EnumTypeWrapper(_CHANNEL) +_MODELSTATUS = DESCRIPTOR.enum_types_by_name['ModelStatus'] ModelStatus = 
enum_type_wrapper.EnumTypeWrapper(_MODELSTATUS) -_ROLE = _descriptor.EnumDescriptor( - name='Role', - full_name='grpc.Role', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='WORKER', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='COMBINER', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='REDUCER', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='OTHER', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2689, - serialized_end=2745, -) -_sym_db.RegisterEnumDescriptor(_ROLE) - +_ROLE = DESCRIPTOR.enum_types_by_name['Role'] Role = enum_type_wrapper.EnumTypeWrapper(_ROLE) -_COMMAND = _descriptor.EnumDescriptor( - name='Command', - full_name='grpc.Command', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='IDLE', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='START', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='PAUSE', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='STOP', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='RESET', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='REPORT', index=5, number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2747, - serialized_end=2821, -) -_sym_db.RegisterEnumDescriptor(_COMMAND) - +_COMMAND = DESCRIPTOR.enum_types_by_name['Command'] Command = enum_type_wrapper.EnumTypeWrapper(_COMMAND) -_CONNECTIONSTATUS = _descriptor.EnumDescriptor( - name='ConnectionStatus', - full_name='grpc.ConnectionStatus', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='NOT_ACCEPTING', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ACCEPTING', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='TRY_AGAIN_LATER', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=2823, - serialized_end=2896, -) -_sym_db.RegisterEnumDescriptor(_CONNECTIONSTATUS) - +_CONNECTIONSTATUS = DESCRIPTOR.enum_types_by_name['ConnectionStatus'] ConnectionStatus = enum_type_wrapper.EnumTypeWrapper(_CONNECTIONSTATUS) LOG = 0 MODEL_UPDATE_REQUEST = 1 MODEL_UPDATE = 2 
MODEL_VALIDATION_REQUEST = 3 MODEL_VALIDATION = 4 +INFERENCE = 5 DEFAULT = 0 MODEL_UPDATE_REQUESTS = 1 MODEL_UPDATES = 2 @@ -286,1646 +59,268 @@ ACCEPTING = 1 TRY_AGAIN_LATER = 2 -_STATUS_LOGLEVEL = _descriptor.EnumDescriptor( - name='LogLevel', - full_name='grpc.Status.LogLevel', - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name='INFO', index=0, number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='DEBUG', index=1, number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='WARNING', index=2, number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='ERROR', index=3, number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - _descriptor.EnumValueDescriptor( - name='AUDIT', index=4, number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key), - ], - containing_type=None, - serialized_options=None, - serialized_start=304, - serialized_end=370, -) -_sym_db.RegisterEnumDescriptor(_STATUS_LOGLEVEL) - -_RESPONSE = _descriptor.Descriptor( - name='Response', - full_name='grpc.Response', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.Response.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='response', full_name='grpc.Response.response', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=41, - serialized_end=99, -) - -_STATUS = _descriptor.Descriptor( - name='Status', - full_name='grpc.Status', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.Status.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='status', full_name='grpc.Status.status', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='log_level', full_name='grpc.Status.log_level', 
index=2, - number=3, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.Status.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.Status.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.Status.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='type', full_name='grpc.Status.type', index=6, - number=7, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='extra', full_name='grpc.Status.extra', index=7, - number=8, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _STATUS_LOGLEVEL, - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=102, - serialized_end=370, -) - -_MODELUPDATEREQUEST = _descriptor.Descriptor( - name='ModelUpdateRequest', - full_name='grpc.ModelUpdateRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelUpdateRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelUpdateRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelUpdateRequest.model_id', 
index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelUpdateRequest.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelUpdateRequest.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelUpdateRequest.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=373, - serialized_end=530, -) - -_MODELUPDATE = _descriptor.Descriptor( - name='ModelUpdate', - full_name='grpc.ModelUpdate', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelUpdate.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelUpdate.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelUpdate.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_update_id', full_name='grpc.ModelUpdate.model_update_id', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - 
name='correlation_id', full_name='grpc.ModelUpdate.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelUpdate.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='meta', full_name='grpc.ModelUpdate.meta', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=533, - serialized_end=708, -) - -_MODELVALIDATIONREQUEST = _descriptor.Descriptor( - name='ModelValidationRequest', - full_name='grpc.ModelValidationRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelValidationRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelValidationRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelValidationRequest.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelValidationRequest.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelValidationRequest.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelValidationRequest.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=711, - serialized_end=872, -) - -_MODELVALIDATION = _descriptor.Descriptor( - name='ModelValidation', - full_name='grpc.ModelValidation', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelValidation.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelValidation.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.ModelValidation.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelValidation.data', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='correlation_id', full_name='grpc.ModelValidation.correlation_id', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ModelValidation.timestamp', index=5, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='meta', full_name='grpc.ModelValidation.meta', index=6, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=875, - serialized_end=1043, -) - -_MODELREQUEST = _descriptor.Descriptor( - name='ModelRequest', - full_name='grpc.ModelRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ModelRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ModelRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelRequest.data', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=b"", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='id', full_name='grpc.ModelRequest.id', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='status', full_name='grpc.ModelRequest.status', index=4, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1046, - serialized_end=1183, -) - -_MODELRESPONSE = _descriptor.Descriptor( - name='ModelResponse', - full_name='grpc.ModelResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ModelResponse.data', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=b"", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='id', full_name='grpc.ModelResponse.id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='status', full_name='grpc.ModelResponse.status', index=2, - number=3, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='message', full_name='grpc.ModelResponse.message', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1185, - serialized_end=1278, -) - -_GETGLOBALMODELREQUEST = _descriptor.Descriptor( - name='GetGlobalModelRequest', - full_name='grpc.GetGlobalModelRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.GetGlobalModelRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.GetGlobalModelRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1280, - serialized_end=1365, -) - -_GETGLOBALMODELRESPONSE = _descriptor.Descriptor( - name='GetGlobalModelResponse', - full_name='grpc.GetGlobalModelResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.GetGlobalModelResponse.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.GetGlobalModelResponse.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='model_id', full_name='grpc.GetGlobalModelResponse.model_id', index=2, - number=3, type=9, cpp_type=9, label=1, - 
has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1367, - serialized_end=1471, -) - -_HEARTBEAT = _descriptor.Descriptor( - name='Heartbeat', - full_name='grpc.Heartbeat', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.Heartbeat.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1473, - serialized_end=1514, -) - -_CLIENTAVAILABLEMESSAGE = _descriptor.Descriptor( - name='ClientAvailableMessage', - full_name='grpc.ClientAvailableMessage', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ClientAvailableMessage.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='data', full_name='grpc.ClientAvailableMessage.data', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='timestamp', full_name='grpc.ClientAvailableMessage.timestamp', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1516, - serialized_end=1603, -) - -_LISTCLIENTSREQUEST = _descriptor.Descriptor( - name='ListClientsRequest', - full_name='grpc.ListClientsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ListClientsRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='channel', full_name='grpc.ListClientsRequest.channel', index=1, - number=2, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1605, - serialized_end=1687, -) - -_CLIENTLIST = _descriptor.Descriptor( - name='ClientList', - full_name='grpc.ClientList', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='client', full_name='grpc.ClientList.client', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1689, - serialized_end=1731, -) - -_CLIENT = _descriptor.Descriptor( - name='Client', - full_name='grpc.Client', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='role', full_name='grpc.Client.role', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='name', full_name='grpc.Client.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1733, - serialized_end=1781, -) - -_REASSIGNREQUEST = _descriptor.Descriptor( - name='ReassignRequest', - full_name='grpc.ReassignRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ReassignRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ReassignRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, 
file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='server', full_name='grpc.ReassignRequest.server', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='port', full_name='grpc.ReassignRequest.port', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1783, - serialized_end=1892, -) - -_RECONNECTREQUEST = _descriptor.Descriptor( - name='ReconnectRequest', - full_name='grpc.ReconnectRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ReconnectRequest.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='receiver', full_name='grpc.ReconnectRequest.receiver', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='reconnect', full_name='grpc.ReconnectRequest.reconnect', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1894, - serialized_end=1993, -) - -_PARAMETER = _descriptor.Descriptor( - name='Parameter', - full_name='grpc.Parameter', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='grpc.Parameter.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='value', full_name='grpc.Parameter.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1995, - serialized_end=2034, -) - -_CONTROLREQUEST = _descriptor.Descriptor( - name='ControlRequest', - full_name='grpc.ControlRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='command', full_name='grpc.ControlRequest.command', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameter', full_name='grpc.ControlRequest.parameter', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2036, - serialized_end=2120, -) - -_CONTROLRESPONSE = _descriptor.Descriptor( - name='ControlResponse', - full_name='grpc.ControlResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='message', full_name='grpc.ControlResponse.message', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameter', full_name='grpc.ControlResponse.parameter', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2122, - serialized_end=2192, -) - -_REPORTRESPONSE = _descriptor.Descriptor( - name='ReportResponse', - full_name='grpc.ReportResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='sender', full_name='grpc.ReportResponse.sender', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='parameter', full_name='grpc.ReportResponse.parameter', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2194, - serialized_end=2276, -) - -_CONNECTIONREQUEST = _descriptor.Descriptor( - name='ConnectionRequest', - full_name='grpc.ConnectionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2278, - serialized_end=2297, -) - -_CONNECTIONRESPONSE = _descriptor.Descriptor( - name='ConnectionResponse', - full_name='grpc.ConnectionResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='status', full_name='grpc.ConnectionResponse.status', index=0, - number=1, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2299, - serialized_end=2359, -) - -_RESPONSE.fields_by_name['sender'].message_type = _CLIENT -_STATUS.fields_by_name['sender'].message_type = _CLIENT -_STATUS.fields_by_name['log_level'].enum_type = _STATUS_LOGLEVEL -_STATUS.fields_by_name['type'].enum_type = _STATUSTYPE -_STATUS_LOGLEVEL.containing_type = _STATUS -_MODELUPDATEREQUEST.fields_by_name['sender'].message_type = _CLIENT -_MODELUPDATEREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_MODELUPDATE.fields_by_name['sender'].message_type = _CLIENT -_MODELUPDATE.fields_by_name['receiver'].message_type = _CLIENT -_MODELVALIDATIONREQUEST.fields_by_name['sender'].message_type = _CLIENT -_MODELVALIDATIONREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_MODELVALIDATION.fields_by_name['sender'].message_type = _CLIENT -_MODELVALIDATION.fields_by_name['receiver'].message_type = _CLIENT -_MODELREQUEST.fields_by_name['sender'].message_type = _CLIENT -_MODELREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_MODELREQUEST.fields_by_name['status'].enum_type = _MODELSTATUS -_MODELRESPONSE.fields_by_name['status'].enum_type = _MODELSTATUS -_GETGLOBALMODELREQUEST.fields_by_name['sender'].message_type = _CLIENT -_GETGLOBALMODELREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_GETGLOBALMODELRESPONSE.fields_by_name['sender'].message_type = _CLIENT -_GETGLOBALMODELRESPONSE.fields_by_name['receiver'].message_type = _CLIENT -_HEARTBEAT.fields_by_name['sender'].message_type = _CLIENT -_CLIENTAVAILABLEMESSAGE.fields_by_name['sender'].message_type = _CLIENT -_LISTCLIENTSREQUEST.fields_by_name['sender'].message_type = _CLIENT -_LISTCLIENTSREQUEST.fields_by_name['channel'].enum_type = _CHANNEL -_CLIENTLIST.fields_by_name['client'].message_type = _CLIENT -_CLIENT.fields_by_name['role'].enum_type = _ROLE -_REASSIGNREQUEST.fields_by_name['sender'].message_type = _CLIENT 
-_REASSIGNREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_RECONNECTREQUEST.fields_by_name['sender'].message_type = _CLIENT -_RECONNECTREQUEST.fields_by_name['receiver'].message_type = _CLIENT -_CONTROLREQUEST.fields_by_name['command'].enum_type = _COMMAND -_CONTROLREQUEST.fields_by_name['parameter'].message_type = _PARAMETER -_CONTROLRESPONSE.fields_by_name['parameter'].message_type = _PARAMETER -_REPORTRESPONSE.fields_by_name['sender'].message_type = _CLIENT -_REPORTRESPONSE.fields_by_name['parameter'].message_type = _PARAMETER -_CONNECTIONRESPONSE.fields_by_name['status'].enum_type = _CONNECTIONSTATUS -DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE -DESCRIPTOR.message_types_by_name['Status'] = _STATUS -DESCRIPTOR.message_types_by_name['ModelUpdateRequest'] = _MODELUPDATEREQUEST -DESCRIPTOR.message_types_by_name['ModelUpdate'] = _MODELUPDATE -DESCRIPTOR.message_types_by_name['ModelValidationRequest'] = _MODELVALIDATIONREQUEST -DESCRIPTOR.message_types_by_name['ModelValidation'] = _MODELVALIDATION -DESCRIPTOR.message_types_by_name['ModelRequest'] = _MODELREQUEST -DESCRIPTOR.message_types_by_name['ModelResponse'] = _MODELRESPONSE -DESCRIPTOR.message_types_by_name['GetGlobalModelRequest'] = _GETGLOBALMODELREQUEST -DESCRIPTOR.message_types_by_name['GetGlobalModelResponse'] = _GETGLOBALMODELRESPONSE -DESCRIPTOR.message_types_by_name['Heartbeat'] = _HEARTBEAT -DESCRIPTOR.message_types_by_name['ClientAvailableMessage'] = _CLIENTAVAILABLEMESSAGE -DESCRIPTOR.message_types_by_name['ListClientsRequest'] = _LISTCLIENTSREQUEST -DESCRIPTOR.message_types_by_name['ClientList'] = _CLIENTLIST -DESCRIPTOR.message_types_by_name['Client'] = _CLIENT -DESCRIPTOR.message_types_by_name['ReassignRequest'] = _REASSIGNREQUEST -DESCRIPTOR.message_types_by_name['ReconnectRequest'] = _RECONNECTREQUEST -DESCRIPTOR.message_types_by_name['Parameter'] = _PARAMETER -DESCRIPTOR.message_types_by_name['ControlRequest'] = _CONTROLREQUEST -DESCRIPTOR.message_types_by_name['ControlResponse'] = _CONTROLRESPONSE -DESCRIPTOR.message_types_by_name['ReportResponse'] = _REPORTRESPONSE -DESCRIPTOR.message_types_by_name['ConnectionRequest'] = _CONNECTIONREQUEST -DESCRIPTOR.message_types_by_name['ConnectionResponse'] = _CONNECTIONRESPONSE -DESCRIPTOR.enum_types_by_name['StatusType'] = _STATUSTYPE -DESCRIPTOR.enum_types_by_name['Channel'] = _CHANNEL -DESCRIPTOR.enum_types_by_name['ModelStatus'] = _MODELSTATUS -DESCRIPTOR.enum_types_by_name['Role'] = _ROLE -DESCRIPTOR.enum_types_by_name['Command'] = _COMMAND -DESCRIPTOR.enum_types_by_name['ConnectionStatus'] = _CONNECTIONSTATUS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) +_RESPONSE = DESCRIPTOR.message_types_by_name['Response'] +_STATUS = DESCRIPTOR.message_types_by_name['Status'] +_MODELUPDATEREQUEST = DESCRIPTOR.message_types_by_name['ModelUpdateRequest'] +_MODELUPDATE = DESCRIPTOR.message_types_by_name['ModelUpdate'] +_MODELVALIDATIONREQUEST = DESCRIPTOR.message_types_by_name['ModelValidationRequest'] +_MODELVALIDATION = DESCRIPTOR.message_types_by_name['ModelValidation'] +_MODELREQUEST = DESCRIPTOR.message_types_by_name['ModelRequest'] +_MODELRESPONSE = DESCRIPTOR.message_types_by_name['ModelResponse'] +_GETGLOBALMODELREQUEST = DESCRIPTOR.message_types_by_name['GetGlobalModelRequest'] +_GETGLOBALMODELRESPONSE = DESCRIPTOR.message_types_by_name['GetGlobalModelResponse'] +_HEARTBEAT = DESCRIPTOR.message_types_by_name['Heartbeat'] +_CLIENTAVAILABLEMESSAGE = DESCRIPTOR.message_types_by_name['ClientAvailableMessage'] +_LISTCLIENTSREQUEST = 
DESCRIPTOR.message_types_by_name['ListClientsRequest'] +_CLIENTLIST = DESCRIPTOR.message_types_by_name['ClientList'] +_CLIENT = DESCRIPTOR.message_types_by_name['Client'] +_REASSIGNREQUEST = DESCRIPTOR.message_types_by_name['ReassignRequest'] +_RECONNECTREQUEST = DESCRIPTOR.message_types_by_name['ReconnectRequest'] +_PARAMETER = DESCRIPTOR.message_types_by_name['Parameter'] +_CONTROLREQUEST = DESCRIPTOR.message_types_by_name['ControlRequest'] +_CONTROLRESPONSE = DESCRIPTOR.message_types_by_name['ControlResponse'] +_REPORTRESPONSE = DESCRIPTOR.message_types_by_name['ReportResponse'] +_CONNECTIONREQUEST = DESCRIPTOR.message_types_by_name['ConnectionRequest'] +_CONNECTIONRESPONSE = DESCRIPTOR.message_types_by_name['ConnectionResponse'] +_STATUS_LOGLEVEL = _STATUS.enum_types_by_name['LogLevel'] Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), { - 'DESCRIPTOR': _RESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Response) -}) + 'DESCRIPTOR' : _RESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Response) + }) _sym_db.RegisterMessage(Response) Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), { - 'DESCRIPTOR': _STATUS, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Status) -}) + 'DESCRIPTOR' : _STATUS, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Status) + }) _sym_db.RegisterMessage(Status) ModelUpdateRequest = _reflection.GeneratedProtocolMessageType('ModelUpdateRequest', (_message.Message,), { - 'DESCRIPTOR': _MODELUPDATEREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelUpdateRequest) -}) + 'DESCRIPTOR' : _MODELUPDATEREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelUpdateRequest) + }) _sym_db.RegisterMessage(ModelUpdateRequest) ModelUpdate = _reflection.GeneratedProtocolMessageType('ModelUpdate', (_message.Message,), { - 'DESCRIPTOR': _MODELUPDATE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelUpdate) -}) + 'DESCRIPTOR' : _MODELUPDATE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelUpdate) + }) _sym_db.RegisterMessage(ModelUpdate) ModelValidationRequest = _reflection.GeneratedProtocolMessageType('ModelValidationRequest', (_message.Message,), { - 'DESCRIPTOR': _MODELVALIDATIONREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelValidationRequest) -}) + 'DESCRIPTOR' : _MODELVALIDATIONREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelValidationRequest) + }) _sym_db.RegisterMessage(ModelValidationRequest) ModelValidation = _reflection.GeneratedProtocolMessageType('ModelValidation', (_message.Message,), { - 'DESCRIPTOR': _MODELVALIDATION, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelValidation) -}) + 'DESCRIPTOR' : _MODELVALIDATION, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelValidation) + }) _sym_db.RegisterMessage(ModelValidation) ModelRequest = _reflection.GeneratedProtocolMessageType('ModelRequest', (_message.Message,), { - 'DESCRIPTOR': _MODELREQUEST, - '__module__': 
'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelRequest) -}) + 'DESCRIPTOR' : _MODELREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelRequest) + }) _sym_db.RegisterMessage(ModelRequest) ModelResponse = _reflection.GeneratedProtocolMessageType('ModelResponse', (_message.Message,), { - 'DESCRIPTOR': _MODELRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ModelResponse) -}) + 'DESCRIPTOR' : _MODELRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ModelResponse) + }) _sym_db.RegisterMessage(ModelResponse) GetGlobalModelRequest = _reflection.GeneratedProtocolMessageType('GetGlobalModelRequest', (_message.Message,), { - 'DESCRIPTOR': _GETGLOBALMODELREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelRequest) -}) + 'DESCRIPTOR' : _GETGLOBALMODELREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelRequest) + }) _sym_db.RegisterMessage(GetGlobalModelRequest) GetGlobalModelResponse = _reflection.GeneratedProtocolMessageType('GetGlobalModelResponse', (_message.Message,), { - 'DESCRIPTOR': _GETGLOBALMODELRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelResponse) -}) + 'DESCRIPTOR' : _GETGLOBALMODELRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.GetGlobalModelResponse) + }) _sym_db.RegisterMessage(GetGlobalModelResponse) Heartbeat = _reflection.GeneratedProtocolMessageType('Heartbeat', (_message.Message,), { - 'DESCRIPTOR': _HEARTBEAT, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Heartbeat) -}) + 'DESCRIPTOR' : _HEARTBEAT, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Heartbeat) + }) _sym_db.RegisterMessage(Heartbeat) ClientAvailableMessage = _reflection.GeneratedProtocolMessageType('ClientAvailableMessage', (_message.Message,), { - 'DESCRIPTOR': _CLIENTAVAILABLEMESSAGE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ClientAvailableMessage) -}) + 'DESCRIPTOR' : _CLIENTAVAILABLEMESSAGE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ClientAvailableMessage) + }) _sym_db.RegisterMessage(ClientAvailableMessage) ListClientsRequest = _reflection.GeneratedProtocolMessageType('ListClientsRequest', (_message.Message,), { - 'DESCRIPTOR': _LISTCLIENTSREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ListClientsRequest) -}) + 'DESCRIPTOR' : _LISTCLIENTSREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ListClientsRequest) + }) _sym_db.RegisterMessage(ListClientsRequest) ClientList = _reflection.GeneratedProtocolMessageType('ClientList', (_message.Message,), { - 'DESCRIPTOR': _CLIENTLIST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ClientList) -}) + 'DESCRIPTOR' : _CLIENTLIST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ClientList) + }) _sym_db.RegisterMessage(ClientList) Client = _reflection.GeneratedProtocolMessageType('Client', (_message.Message,), { - 'DESCRIPTOR': 
_CLIENT, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Client) -}) + 'DESCRIPTOR' : _CLIENT, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Client) + }) _sym_db.RegisterMessage(Client) ReassignRequest = _reflection.GeneratedProtocolMessageType('ReassignRequest', (_message.Message,), { - 'DESCRIPTOR': _REASSIGNREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ReassignRequest) -}) + 'DESCRIPTOR' : _REASSIGNREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ReassignRequest) + }) _sym_db.RegisterMessage(ReassignRequest) ReconnectRequest = _reflection.GeneratedProtocolMessageType('ReconnectRequest', (_message.Message,), { - 'DESCRIPTOR': _RECONNECTREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ReconnectRequest) -}) + 'DESCRIPTOR' : _RECONNECTREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ReconnectRequest) + }) _sym_db.RegisterMessage(ReconnectRequest) Parameter = _reflection.GeneratedProtocolMessageType('Parameter', (_message.Message,), { - 'DESCRIPTOR': _PARAMETER, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.Parameter) -}) + 'DESCRIPTOR' : _PARAMETER, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.Parameter) + }) _sym_db.RegisterMessage(Parameter) ControlRequest = _reflection.GeneratedProtocolMessageType('ControlRequest', (_message.Message,), { - 'DESCRIPTOR': _CONTROLREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ControlRequest) -}) + 'DESCRIPTOR' : _CONTROLREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ControlRequest) + }) _sym_db.RegisterMessage(ControlRequest) ControlResponse = _reflection.GeneratedProtocolMessageType('ControlResponse', (_message.Message,), { - 'DESCRIPTOR': _CONTROLRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ControlResponse) -}) + 'DESCRIPTOR' : _CONTROLRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ControlResponse) + }) _sym_db.RegisterMessage(ControlResponse) ReportResponse = _reflection.GeneratedProtocolMessageType('ReportResponse', (_message.Message,), { - 'DESCRIPTOR': _REPORTRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ReportResponse) -}) + 'DESCRIPTOR' : _REPORTRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ReportResponse) + }) _sym_db.RegisterMessage(ReportResponse) ConnectionRequest = _reflection.GeneratedProtocolMessageType('ConnectionRequest', (_message.Message,), { - 'DESCRIPTOR': _CONNECTIONREQUEST, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # @@protoc_insertion_point(class_scope:grpc.ConnectionRequest) -}) + 'DESCRIPTOR' : _CONNECTIONREQUEST, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ConnectionRequest) + }) _sym_db.RegisterMessage(ConnectionRequest) ConnectionResponse = _reflection.GeneratedProtocolMessageType('ConnectionResponse', (_message.Message,), { - 'DESCRIPTOR': _CONNECTIONRESPONSE, - '__module__': 'fedn.common.net.grpc.fedn_pb2' - # 
@@protoc_insertion_point(class_scope:grpc.ConnectionResponse) -}) + 'DESCRIPTOR' : _CONNECTIONRESPONSE, + '__module__' : 'fedn.common.net.grpc.fedn_pb2' + # @@protoc_insertion_point(class_scope:grpc.ConnectionResponse) + }) _sym_db.RegisterMessage(ConnectionResponse) -_MODELSERVICE = _descriptor.ServiceDescriptor( - name='ModelService', - full_name='grpc.ModelService', - file=DESCRIPTOR, - index=0, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=2898, - serialized_end=3020, - methods=[ - _descriptor.MethodDescriptor( - name='Upload', - full_name='grpc.ModelService.Upload', - index=0, - containing_service=None, - input_type=_MODELREQUEST, - output_type=_MODELRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Download', - full_name='grpc.ModelService.Download', - index=1, - containing_service=None, - input_type=_MODELREQUEST, - output_type=_MODELRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_MODELSERVICE) - -DESCRIPTOR.services_by_name['ModelService'] = _MODELSERVICE - -_CONTROL = _descriptor.ServiceDescriptor( - name='Control', - full_name='grpc.Control', - file=DESCRIPTOR, - index=1, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3023, - serialized_end=3250, - methods=[ - _descriptor.MethodDescriptor( - name='Start', - full_name='grpc.Control.Start', - index=0, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_CONTROLRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Stop', - full_name='grpc.Control.Stop', - index=1, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_CONTROLRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Configure', - full_name='grpc.Control.Configure', - index=2, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_REPORTRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='Report', - full_name='grpc.Control.Report', - index=3, - containing_service=None, - input_type=_CONTROLREQUEST, - output_type=_REPORTRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_CONTROL) - -DESCRIPTOR.services_by_name['Control'] = _CONTROL - -_REDUCER = _descriptor.ServiceDescriptor( - name='Reducer', - full_name='grpc.Reducer', - file=DESCRIPTOR, - index=2, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3252, - serialized_end=3338, - methods=[ - _descriptor.MethodDescriptor( - name='GetGlobalModel', - full_name='grpc.Reducer.GetGlobalModel', - index=0, - containing_service=None, - input_type=_GETGLOBALMODELREQUEST, - output_type=_GETGLOBALMODELRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_REDUCER) - -DESCRIPTOR.services_by_name['Reducer'] = _REDUCER - -_CONNECTOR = _descriptor.ServiceDescriptor( - name='Connector', - full_name='grpc.Connector', - file=DESCRIPTOR, - index=3, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3341, - serialized_end=3768, - methods=[ - _descriptor.MethodDescriptor( - 
name='AllianceStatusStream', - full_name='grpc.Connector.AllianceStatusStream', - index=0, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_STATUS, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendStatus', - full_name='grpc.Connector.SendStatus', - index=1, - containing_service=None, - input_type=_STATUS, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ListActiveClients', - full_name='grpc.Connector.ListActiveClients', - index=2, - containing_service=None, - input_type=_LISTCLIENTSREQUEST, - output_type=_CLIENTLIST, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='AcceptingClients', - full_name='grpc.Connector.AcceptingClients', - index=3, - containing_service=None, - input_type=_CONNECTIONREQUEST, - output_type=_CONNECTIONRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendHeartbeat', - full_name='grpc.Connector.SendHeartbeat', - index=4, - containing_service=None, - input_type=_HEARTBEAT, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ReassignClient', - full_name='grpc.Connector.ReassignClient', - index=5, - containing_service=None, - input_type=_REASSIGNREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ReconnectClient', - full_name='grpc.Connector.ReconnectClient', - index=6, - containing_service=None, - input_type=_RECONNECTREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_CONNECTOR) - -DESCRIPTOR.services_by_name['Connector'] = _CONNECTOR - -_COMBINER = _descriptor.ServiceDescriptor( - name='Combiner', - full_name='grpc.Combiner', - file=DESCRIPTOR, - index=4, - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_start=3771, - serialized_end=4373, - methods=[ - _descriptor.MethodDescriptor( - name='ModelUpdateRequestStream', - full_name='grpc.Combiner.ModelUpdateRequestStream', - index=0, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELUPDATEREQUEST, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ModelUpdateStream', - full_name='grpc.Combiner.ModelUpdateStream', - index=1, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELUPDATE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ModelValidationRequestStream', - full_name='grpc.Combiner.ModelValidationRequestStream', - index=2, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELVALIDATIONREQUEST, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='ModelValidationStream', - full_name='grpc.Combiner.ModelValidationStream', - index=3, - containing_service=None, - input_type=_CLIENTAVAILABLEMESSAGE, - output_type=_MODELVALIDATION, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - 
name='SendModelUpdateRequest', - full_name='grpc.Combiner.SendModelUpdateRequest', - index=4, - containing_service=None, - input_type=_MODELUPDATEREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelUpdate', - full_name='grpc.Combiner.SendModelUpdate', - index=5, - containing_service=None, - input_type=_MODELUPDATE, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelValidationRequest', - full_name='grpc.Combiner.SendModelValidationRequest', - index=6, - containing_service=None, - input_type=_MODELVALIDATIONREQUEST, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name='SendModelValidation', - full_name='grpc.Combiner.SendModelValidation', - index=7, - containing_service=None, - input_type=_MODELVALIDATION, - output_type=_RESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - ]) -_sym_db.RegisterServiceDescriptor(_COMBINER) - -DESCRIPTOR.services_by_name['Combiner'] = _COMBINER - +_MODELSERVICE = DESCRIPTOR.services_by_name['ModelService'] +_CONTROL = DESCRIPTOR.services_by_name['Control'] +_REDUCER = DESCRIPTOR.services_by_name['Reducer'] +_CONNECTOR = DESCRIPTOR.services_by_name['Connector'] +_COMBINER = DESCRIPTOR.services_by_name['Combiner'] +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _STATUSTYPE._serialized_start=2412 + _STATUSTYPE._serialized_end=2544 + _CHANNEL._serialized_start=2547 + _CHANNEL._serialized_end=2681 + _MODELSTATUS._serialized_start=2683 + _MODELSTATUS._serialized_end=2753 + _ROLE._serialized_start=2755 + _ROLE._serialized_end=2811 + _COMMAND._serialized_start=2813 + _COMMAND._serialized_end=2887 + _CONNECTIONSTATUS._serialized_start=2889 + _CONNECTIONSTATUS._serialized_end=2962 + _RESPONSE._serialized_start=41 + _RESPONSE._serialized_end=99 + _STATUS._serialized_start=102 + _STATUS._serialized_end=370 + _STATUS_LOGLEVEL._serialized_start=304 + _STATUS_LOGLEVEL._serialized_end=370 + _MODELUPDATEREQUEST._serialized_start=373 + _MODELUPDATEREQUEST._serialized_end=544 + _MODELUPDATE._serialized_start=547 + _MODELUPDATE._serialized_end=722 + _MODELVALIDATIONREQUEST._serialized_start=725 + _MODELVALIDATIONREQUEST._serialized_end=922 + _MODELVALIDATION._serialized_start=925 + _MODELVALIDATION._serialized_end=1093 + _MODELREQUEST._serialized_start=1096 + _MODELREQUEST._serialized_end=1233 + _MODELRESPONSE._serialized_start=1235 + _MODELRESPONSE._serialized_end=1328 + _GETGLOBALMODELREQUEST._serialized_start=1330 + _GETGLOBALMODELREQUEST._serialized_end=1415 + _GETGLOBALMODELRESPONSE._serialized_start=1417 + _GETGLOBALMODELRESPONSE._serialized_end=1521 + _HEARTBEAT._serialized_start=1523 + _HEARTBEAT._serialized_end=1564 + _CLIENTAVAILABLEMESSAGE._serialized_start=1566 + _CLIENTAVAILABLEMESSAGE._serialized_end=1653 + _LISTCLIENTSREQUEST._serialized_start=1655 + _LISTCLIENTSREQUEST._serialized_end=1737 + _CLIENTLIST._serialized_start=1739 + _CLIENTLIST._serialized_end=1781 + _CLIENT._serialized_start=1783 + _CLIENT._serialized_end=1831 + _REASSIGNREQUEST._serialized_start=1833 + _REASSIGNREQUEST._serialized_end=1942 + _RECONNECTREQUEST._serialized_start=1944 + _RECONNECTREQUEST._serialized_end=2043 + _PARAMETER._serialized_start=2045 + _PARAMETER._serialized_end=2084 + _CONTROLREQUEST._serialized_start=2086 + 
_CONTROLREQUEST._serialized_end=2170 + _CONTROLRESPONSE._serialized_start=2172 + _CONTROLRESPONSE._serialized_end=2242 + _REPORTRESPONSE._serialized_start=2244 + _REPORTRESPONSE._serialized_end=2326 + _CONNECTIONREQUEST._serialized_start=2328 + _CONNECTIONREQUEST._serialized_end=2347 + _CONNECTIONRESPONSE._serialized_start=2349 + _CONNECTIONRESPONSE._serialized_end=2409 + _MODELSERVICE._serialized_start=2964 + _MODELSERVICE._serialized_end=3086 + _CONTROL._serialized_start=3089 + _CONTROL._serialized_end=3386 + _REDUCER._serialized_start=3388 + _REDUCER._serialized_end=3474 + _CONNECTOR._serialized_start=3477 + _CONNECTOR._serialized_end=3904 + _COMBINER._serialized_start=3907 + _COMBINER._serialized_end=4509 # @@protoc_insertion_point(module_scope) diff --git a/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py b/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py index 9989824f7..9590e2b5c 100644 --- a/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py +++ b/fedn/fedn/common/net/grpc/fedn_pb2_grpc.py @@ -2,8 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -from fedn.common.net.grpc import \ - fedn_pb2 as fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2 +from fedn.common.net.grpc import fedn_pb2 as fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2 class ModelServiceStub(object): @@ -16,15 +15,15 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.Upload = channel.stream_unary( - '/grpc.ModelService/Upload', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - ) + '/grpc.ModelService/Upload', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + ) self.Download = channel.unary_stream( - '/grpc.ModelService/Download', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - ) + '/grpc.ModelService/Download', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + ) class ModelServiceServicer(object): @@ -44,95 +43,60 @@ def Download(self, request, context): def add_ModelServiceServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'Upload': grpc.stream_unary_rpc_method_handler( - servicer.Upload, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, - ), - 'Download': grpc.unary_stream_rpc_method_handler( - servicer.Download, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, - ), + 'Upload': grpc.stream_unary_rpc_method_handler( + servicer.Upload, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, + ), + 'Download': grpc.unary_stream_rpc_method_handler( + servicer.Download, + 
request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.ModelService', rpc_method_handlers) + 'grpc.ModelService', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. class ModelService(object): """Missing associated documentation comment in .proto file.""" @staticmethod def Upload(request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request_iterator: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.stream_unary(request_iterator, target, '/grpc.ModelService/Upload', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Download(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.ModelService/Download', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class ControlStub(object): @@ -145,25 +109,30 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Start = channel.unary_unary( - '/grpc.Control/Start', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - ) + '/grpc.Control/Start', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + ) self.Stop = channel.unary_unary( - '/grpc.Control/Stop', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - ) + '/grpc.Control/Stop', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + ) self.Configure = channel.unary_unary( - '/grpc.Control/Configure', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - ) + '/grpc.Control/Configure', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + ) + self.FlushAggregationQueue = channel.unary_unary( + '/grpc.Control/FlushAggregationQueue', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + ) self.Report = channel.unary_unary( - '/grpc.Control/Report', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - ) + '/grpc.Control/Report', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + ) class ControlServicer(object): @@ -187,6 +156,12 @@ def Configure(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def FlushAggregationQueue(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Report(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -195,165 +170,126 @@ def Report(self, request, context): def add_ControlServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'Start': grpc.unary_unary_rpc_method_handler( - servicer.Start, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, - ), - 'Stop': grpc.unary_unary_rpc_method_handler( - servicer.Stop, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - 
response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, - ), - 'Configure': grpc.unary_unary_rpc_method_handler( - servicer.Configure, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, - ), - 'Report': grpc.unary_unary_rpc_method_handler( - servicer.Report, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, - ), + 'Start': grpc.unary_unary_rpc_method_handler( + servicer.Start, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, + ), + 'Stop': grpc.unary_unary_rpc_method_handler( + servicer.Stop, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, + ), + 'Configure': grpc.unary_unary_rpc_method_handler( + servicer.Configure, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, + ), + 'FlushAggregationQueue': grpc.unary_unary_rpc_method_handler( + servicer.FlushAggregationQueue, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.SerializeToString, + ), + 'Report': grpc.unary_unary_rpc_method_handler( + servicer.Report, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Control', rpc_method_handlers) + 'grpc.Control', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class Control(object): """Missing associated documentation comment in .proto file.""" @staticmethod def Start(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Start', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Stop(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Stop', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def Configure(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Configure', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + 
fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod - def Report(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ + def FlushAggregationQueue(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/grpc.Control/FlushAggregationQueue', + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + @staticmethod + def Report(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Control/Report', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ControlRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReportResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class ReducerStub(object): @@ -366,10 +302,10 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.GetGlobalModel = channel.unary_unary( - '/grpc.Reducer/GetGlobalModel', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, - ) + '/grpc.Reducer/GetGlobalModel', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, + ) class ReducerServicer(object): @@ -383,57 +319,38 @@ def GetGlobalModel(self, request, context): def add_ReducerServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'GetGlobalModel': grpc.unary_unary_rpc_method_handler( - servicer.GetGlobalModel, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.SerializeToString, - ), + 'GetGlobalModel': grpc.unary_unary_rpc_method_handler( + servicer.GetGlobalModel, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Reducer', rpc_method_handlers) + 'grpc.Reducer', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. class Reducer(object): """Missing associated documentation comment in .proto file.""" @staticmethod def GetGlobalModel(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Reducer/GetGlobalModel', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.GetGlobalModelResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class ConnectorStub(object): @@ -446,40 +363,40 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.AllianceStatusStream = channel.unary_stream( - '/grpc.Connector/AllianceStatusStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, - ) + '/grpc.Connector/AllianceStatusStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, + ) self.SendStatus = channel.unary_unary( - '/grpc.Connector/SendStatus', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/SendStatus', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.ListActiveClients = channel.unary_unary( - '/grpc.Connector/ListActiveClients', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, - ) + '/grpc.Connector/ListActiveClients', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, + ) self.AcceptingClients = channel.unary_unary( - '/grpc.Connector/AcceptingClients', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, - ) + '/grpc.Connector/AcceptingClients', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, + ) self.SendHeartbeat = channel.unary_unary( - '/grpc.Connector/SendHeartbeat', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/SendHeartbeat', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.ReassignClient = channel.unary_unary( - '/grpc.Connector/ReassignClient', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/ReassignClient', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.ReconnectClient = channel.unary_unary( - '/grpc.Connector/ReconnectClient', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Connector/ReconnectClient', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, + 
response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) class ConnectorServicer(object): @@ -534,274 +451,170 @@ def ReconnectClient(self, request, context): def add_ConnectorServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'AllianceStatusStream': grpc.unary_stream_rpc_method_handler( - servicer.AllianceStatusStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, - ), - 'SendStatus': grpc.unary_unary_rpc_method_handler( - servicer.SendStatus, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'ListActiveClients': grpc.unary_unary_rpc_method_handler( - servicer.ListActiveClients, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.SerializeToString, - ), - 'AcceptingClients': grpc.unary_unary_rpc_method_handler( - servicer.AcceptingClients, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.SerializeToString, - ), - 'SendHeartbeat': grpc.unary_unary_rpc_method_handler( - servicer.SendHeartbeat, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'ReassignClient': grpc.unary_unary_rpc_method_handler( - servicer.ReassignClient, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'ReconnectClient': grpc.unary_unary_rpc_method_handler( - servicer.ReconnectClient, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), + 'AllianceStatusStream': grpc.unary_stream_rpc_method_handler( + servicer.AllianceStatusStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, + ), + 'SendStatus': grpc.unary_unary_rpc_method_handler( + servicer.SendStatus, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'ListActiveClients': grpc.unary_unary_rpc_method_handler( + servicer.ListActiveClients, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.SerializeToString, + ), + 'AcceptingClients': grpc.unary_unary_rpc_method_handler( + servicer.AcceptingClients, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.SerializeToString, + ), + 'SendHeartbeat': 
grpc.unary_unary_rpc_method_handler( + servicer.SendHeartbeat, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'ReassignClient': grpc.unary_unary_rpc_method_handler( + servicer.ReassignClient, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'ReconnectClient': grpc.unary_unary_rpc_method_handler( + servicer.ReconnectClient, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Connector', rpc_method_handlers) + 'grpc.Connector', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. class Connector(object): """Missing associated documentation comment in .proto file.""" @staticmethod def AllianceStatusStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Connector/AllianceStatusStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendStatus(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/SendStatus', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Status.SerializeToString, + 
fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ListActiveClients(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/ListActiveClients', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ListClientsRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientList.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def AcceptingClients(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/AcceptingClients', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ConnectionResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendHeartbeat(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/SendHeartbeat', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, - 
fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Heartbeat.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ReassignClient(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/ReassignClient', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReassignRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ReconnectClient(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Connector/ReconnectClient', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ReconnectRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) class CombinerStub(object): @@ -814,45 +627,45 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.ModelUpdateRequestStream = channel.unary_stream( - '/grpc.Combiner/ModelUpdateRequestStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, - ) + '/grpc.Combiner/ModelUpdateRequestStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, + ) self.ModelUpdateStream = channel.unary_stream( - '/grpc.Combiner/ModelUpdateStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, - ) + '/grpc.Combiner/ModelUpdateStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, + ) self.ModelValidationRequestStream = channel.unary_stream( - '/grpc.Combiner/ModelValidationRequestStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, - ) + '/grpc.Combiner/ModelValidationRequestStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, + ) self.ModelValidationStream = channel.unary_stream( - '/grpc.Combiner/ModelValidationStream', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, - ) + '/grpc.Combiner/ModelValidationStream', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, + ) self.SendModelUpdateRequest = channel.unary_unary( - '/grpc.Combiner/SendModelUpdateRequest', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelUpdateRequest', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.SendModelUpdate = channel.unary_unary( - '/grpc.Combiner/SendModelUpdate', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelUpdate', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.SendModelValidationRequest = channel.unary_unary( - '/grpc.Combiner/SendModelValidationRequest', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, - 
response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelValidationRequest', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) self.SendModelValidation = channel.unary_unary( - '/grpc.Combiner/SendModelValidation', - request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, - response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - ) + '/grpc.Combiner/SendModelValidation', + request_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, + response_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + ) class CombinerServicer(object): @@ -909,310 +722,189 @@ def SendModelValidation(self, request, context): def add_CombinerServicer_to_server(servicer, server): - """ - - :param servicer: - :param server: - """ rpc_method_handlers = { - 'ModelUpdateRequestStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelUpdateRequestStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, - ), - 'ModelUpdateStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelUpdateStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, - ), - 'ModelValidationRequestStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelValidationRequestStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, - ), - 'ModelValidationStream': grpc.unary_stream_rpc_method_handler( - servicer.ModelValidationStream, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, - ), - 'SendModelUpdateRequest': grpc.unary_unary_rpc_method_handler( - servicer.SendModelUpdateRequest, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'SendModelUpdate': grpc.unary_unary_rpc_method_handler( - servicer.SendModelUpdate, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'SendModelValidationRequest': grpc.unary_unary_rpc_method_handler( - servicer.SendModelValidationRequest, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, - response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), - 'SendModelValidation': grpc.unary_unary_rpc_method_handler( - servicer.SendModelValidation, - request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, - 
response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, - ), + 'ModelUpdateRequestStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelUpdateRequestStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, + ), + 'ModelUpdateStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelUpdateStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, + ), + 'ModelValidationRequestStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelValidationRequestStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, + ), + 'ModelValidationStream': grpc.unary_stream_rpc_method_handler( + servicer.ModelValidationStream, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, + ), + 'SendModelUpdateRequest': grpc.unary_unary_rpc_method_handler( + servicer.SendModelUpdateRequest, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'SendModelUpdate': grpc.unary_unary_rpc_method_handler( + servicer.SendModelUpdate, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'SendModelValidationRequest': grpc.unary_unary_rpc_method_handler( + servicer.SendModelValidationRequest, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), + 'SendModelValidation': grpc.unary_unary_rpc_method_handler( + servicer.SendModelValidation, + request_deserializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, + response_serializer=fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( - 'grpc.Combiner', rpc_method_handlers) + 'grpc.Combiner', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) -# This class is part of an EXPERIMENTAL API. + # This class is part of an EXPERIMENTAL API. 
class Combiner(object): """Missing associated documentation comment in .proto file.""" @staticmethod def ModelUpdateRequestStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelUpdateRequestStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ModelUpdateStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelUpdateStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ModelValidationRequestStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelValidationRequestStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - 
fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def ModelValidationStream(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_stream(request, target, '/grpc.Combiner/ModelValidationStream', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, - metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ClientAvailableMessage.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelUpdateRequest(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelUpdateRequest', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdateRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelUpdate(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, 
+ insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelUpdate', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelUpdate.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelValidationRequest(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelValidationRequest', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidationRequest.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def SendModelValidation(request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): - """ - - :param request: - :param target: - :param options: - :param channel_credentials: - :param call_credentials: - :param insecure: - :param compression: - :param wait_for_ready: - :param timeout: - :param metadata: - :return: - """ + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): return grpc.experimental.unary_unary(request, target, '/grpc.Combiner/SendModelValidation', - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, - fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, - options, channel_credentials, - insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.ModelValidation.SerializeToString, + fedn_dot_common_dot_net_dot_grpc_dot_fedn__pb2.Response.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/fedn/fedn/common/net/grpc/server.py b/fedn/fedn/common/net/grpc/server.py index d85a4bf04..dbe260b7b 100644 --- a/fedn/fedn/common/net/grpc/server.py +++ b/fedn/fedn/common/net/grpc/server.py @@ -27,7 +27,7 @@ def __init__(self, servicer, modelservicer, config): rpc.add_ControlServicer_to_server(servicer, 
self.server) if config['secure']: - print(f"Creating secure gRPCS server using certificate: {config['certificate']}", flush=True) + print(f"Creating secure gRPCS server using certificate: {config['certificate']}", flush=True) server_credentials = grpc.ssl_server_credentials( ((config['key'], config['certificate'],),)) self.server.add_secure_port( diff --git a/fedn/fedn/common/storage/models/modelstorage.py b/fedn/fedn/common/storage/models/modelstorage.py index 423ac411b..c15f47c87 100644 --- a/fedn/fedn/common/storage/models/modelstorage.py +++ b/fedn/fedn/common/storage/models/modelstorage.py @@ -5,37 +5,66 @@ class ModelStorage(ABC): @abstractmethod def exist(self, model_id): - """ + """ Check if model exists in storage - :param model_id: + :param model_id: The model id + :type model_id: str + :return: True if model exists, False otherwise + :rtype: bool """ pass @abstractmethod def get(self, model_id): - """ + """ Get model from storage - :param model_id: + :param model_id: The model id + :type model_id: str + :return: The model + :rtype: object """ pass - # @abstractmethod - # def set(self, model_id, model): - # pass - @abstractmethod def get_meta(self, model_id): - """ + """ Get model metadata from storage - :param model_id: + :param model_id: The model id + :type model_id: str + :return: The model metadata + :rtype: dict """ pass @abstractmethod def set_meta(self, model_id, model_metadata): + """ Set model metadata in storage + + :param model_id: The model id + :type model_id: str + :param model_metadata: The model metadata + :type model_metadata: dict + :return: True if successful, False otherwise + :rtype: bool """ + pass + + @abstractmethod + def delete(self, model_id): + """ Delete model from storage + + :param model_id: The model id + :type model_id: str + :return: True if successful, False otherwise + :rtype: bool + """ + pass + + @abstractmethod + def delete_all(self): + """ Delete all models from storage - :param model_id: - :param model_metadata: + :return: True if successful, False otherwise + :rtype: bool """ pass diff --git a/fedn/fedn/common/storage/models/tempmodelstorage.py b/fedn/fedn/common/storage/models/tempmodelstorage.py index f87c516b0..ea8a65928 100644 --- a/fedn/fedn/common/storage/models/tempmodelstorage.py +++ b/fedn/fedn/common/storage/models/tempmodelstorage.py @@ -85,3 +85,45 @@ def set_meta(self, model_id, model_metadata): :param model_metadata: """ self.models_metadata.update({model_id: model_metadata}) + + # Delete model from disk + def delete(self, model_id): + """ Delete model from temp disk/storage + + :param model_id: model id + :type model_id: str + :return: True if successful, False otherwise + :rtype: bool + """ + try: + os.remove(os.path.join(self.default_dir, str(model_id))) + print("TEMPMODELSTORAGE: Deleted model with id: {}".format(model_id), flush=True) + # Delete id from metadata and models dict + del self.models_metadata[model_id] + del self.models[model_id] + except FileNotFoundError: + print("Could not delete model from disk. 
File not found!", flush=True) + return False + return True + + # Delete all models from disk + def delete_all(self): + """ Delete all models from temp disk/storage + + :return: True if successful, False otherwise + :rtype: bool + """ + ids_pop = [] + for model_id in self.models.keys(): + try: + os.remove(os.path.join(self.default_dir, str(model_id))) + print("TEMPMODELSTORAGE: Deleted model with id: {}".format(model_id), flush=True) + # Add id to list of ids to pop/delete from metadata and models dict + ids_pop.append(model_id) + except FileNotFoundError: + print("TEMPMODELSTORAGE: Could not delete model {} from disk. File not found!".format(model_id), flush=True) + # Remove id from metadata and models dict + for model_id in ids_pop: + del self.models_metadata[model_id] + del self.models[model_id] + return True diff --git a/fedn/fedn/common/storage/models/tests/test_tempmodelstorage.py b/fedn/fedn/common/storage/models/tests/test_tempmodelstorage.py new file mode 100644 index 000000000..19d88899e --- /dev/null +++ b/fedn/fedn/common/storage/models/tests/test_tempmodelstorage.py @@ -0,0 +1,104 @@ +import os +import unittest +from unittest.mock import MagicMock, patch + +from fedn.common.storage.models.tempmodelstorage import TempModelStorage + + +class TestTempModelStorage(unittest.TestCase): + + def setUp(self): + # Setup mock for os.environ.get for FEDN_MODEL_DIR + self.patcher = patch('os.environ.get') + self.mock_get = self.patcher.start() + # Return value of mock should be the same folder as this file + self.mock_get.return_value = os.path.dirname(os.path.realpath(__file__)) + + # Setup storage + self.storage = TempModelStorage() + + # add mock data to storage dicts + self.storage.models = {"model_id1": "model1", "model_id2": "model2"} + self.storage.models_metadata = {"model_id1": "model1", "model_id2": "model2"} + + # Create mock file as BytesIO object + self.mock_file = MagicMock() + self.mock_file.read.return_value = "model1" + self.mock_file.seek.return_value = 0 + self.mock_file.write.return_value = None + + # Test that the storage is initialized with the correct default directory and data structures + def test_init(self): + self.assertEqual(self.storage.default_dir, os.path.dirname(os.path.realpath(__file__))) + self.assertEqual(self.storage.models, {"model_id1": "model1", "model_id2": "model2"}) + self.assertEqual(self.storage.models_metadata, {"model_id1": "model1", "model_id2": "model2"}) + + # Test that the storage can get a model + + def test_get(self): + """ Test that the storage can get a model """ + + # Test that it returns None if model_id does not exist + self.assertEqual(self.storage.get("model_id3"), None) + + # TODO: Patch fedn.ModelStatus.OK and open to return True and mock_file respectively + + def test_get_metadata(self): + """ Test that the storage can get model metadata """ + + # Test that it raises KeyError if model_id does not exist + with self.assertRaises(KeyError): + self.storage.get_meta("model_id3") + + # Test that it returns the correct metadata if model_id exists + self.assertEqual(self.storage.get_meta("model_id1"), "model1") + + def test_set_meta(self): + """ Test that the storage can set model metadata """ + + # Test that it returns the correct metadata if model_id exists + self.storage.set_meta("model_id1", "model3") + self.assertEqual(self.storage.get_meta("model_id1"), "model3") + + def test_delete(self): + """ Test that the storage can delete a model """ + + # Test that it returns False if model_id does not exist + 
self.assertEqual(self.storage.delete("model_id3"), False) + + # Patch os.remove to return True + with patch('os.remove', return_value=True) as mock_remove: + + # Test that it returns True if model_id exists + self.assertEqual(self.storage.delete("model_id1"), True) + + # Test that os.remove is called with the correct path + mock_remove.assert_called_with(os.path.join(self.storage.default_dir, "model_id1")) + + # Test that the model is removed from the storage + self.assertEqual(self.storage.models, {"model_id2": "model2"}) + + # Test that the model metadata is removed from the storage + self.assertEqual(self.storage.models_metadata, {"model_id2": "model2"}) + + def test_delete_all(self): + """ Test that the storage can delete all models """ + + # Patch os.remove to return True + with patch('os.remove', return_value=True) as mock_remove: + + # Test that delete_all returns True + self.assertEqual(self.storage.delete_all(), True) + + # Test that os.remove is called with the correct path + mock_remove.assert_called_with(os.path.join(self.storage.default_dir, "model_id2")) + + # Test that all models are removed from the storage + self.assertEqual(self.storage.models, {}) + + # Test that all model metadata is removed from the storage + self.assertEqual(self.storage.models_metadata, {}) + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/common/storage/s3/s3repo.py b/fedn/fedn/common/storage/s3/s3repo.py index e660e9124..1d673c3da 100644 --- a/fedn/fedn/common/storage/s3/s3repo.py +++ b/fedn/fedn/common/storage/s3/s3repo.py @@ -48,7 +48,7 @@ def set_model(self, model, is_file=True): raise return str(model_id) - def set_compute_context(self, name, compute_package, is_file=True): + def set_compute_package(self, name, compute_package, is_file=True): """ :param name: @@ -75,7 +75,7 @@ def get_compute_package(self, compute_package): raise return data - def delete_compute_context(self, compute_package): + def delete_compute_package(self, compute_package): """ :param compute_package: diff --git a/fedn/fedn/common/tracer/mongotracer.py b/fedn/fedn/common/tracer/mongotracer.py index 5cf6a93a8..92af569ea 100644 --- a/fedn/fedn/common/tracer/mongotracer.py +++ b/fedn/fedn/common/tracer/mongotracer.py @@ -1,7 +1,5 @@ -import threading -from datetime import datetime +import uuid -import psutil from google.protobuf.json_format import MessageToDict from fedn.common.storage.db.mongo import connect_to_mongodb @@ -9,7 +7,7 @@ class MongoTracer(Tracer): - """ + """ Utility for reporting and tracking state in the statestore. """ @@ -17,171 +15,70 @@ def __init__(self, mongo_config, network_id): try: self.mdb = connect_to_mongodb(mongo_config, network_id) self.status = self.mdb['control.status'] - self.round_time = self.mdb['control.round_time'] - self.psutil_monitoring = self.mdb['control.psutil_monitoring'] - self.model_trail = self.mdb['control.model_trail'] - self.latest_model = self.mdb['control.latest_model'] - self.combiner_round_time = self.mdb['control.combiner_round_time'] - # self.combiner_queue_length = self.mdb['control.combiner_queue_length'] - self.round = self.mdb['control.round'] + self.rounds = self.mdb['control.rounds'] + self.sessions = self.mdb['control.sessions'] + self.validations = self.mdb['control.validations'] except Exception as e: print("FAILED TO CONNECT TO MONGO, {}".format(e), flush=True) self.status = None raise - def report(self, msg): - """ + def report_status(self, msg): + """Write status message to the database. 
- :param msg: + :param msg: The status message. """ data = MessageToDict(msg, including_default_value_fields=True) - print("LOG: \n {} \n".format(data), flush=True) - if self.status is not None: self.status.insert_one(data) - def drop_round_time(self): - """ - - """ - if self.round_time: - self.round_time.drop() - - def drop_ps_util_monitor(self): - """ - - """ - if self.psutil_monitoring: - self.psutil_monitoring.drop() - - def drop_model_trail(self): - """ - - """ - if self.model_trail: - self.model_trail.drop() + def report_validation(self, validation): + """Write model validation to the database. - def drop_latest_model(self): + :param validation: The model validation. """ + data = MessageToDict(validation, including_default_value_fields=True) - """ - if self.latest_model: - self.latest_model.drop() + if self.validations is not None: + self.validations.insert_one(data) def drop_status(self): - """ + """Drop the status collection. """ if self.status: self.status.drop() - def drop_combiner_round_time(self): - """ - - """ - if self.combiner_round_time: - self.combiner_round_time.drop() - - def drop_combiner_round(self): - """ - - """ - if self.round: - self.round.drop() + def new_session(self, id=None): + """ Create a new session. """ + if not id: + id = uuid.uuid4() + data = {'session_id': str(id)} + self.sessions.insert_one(data) - def set_latest_time(self, round, round_time): - """ + def new_round(self, id): + """ Create a new session. """ - :param round: - :param round_time: - """ - self.round_time.update_one({'key': 'round_time'}, { - '$push': {'round': round}}, True) - self.round_time.update_one({'key': 'round_time'}, { - '$push': {'round_time': round_time}}, True) + data = {'round_id': str(id)} + self.rounds.insert_one(data) - def set_combiner_time(self, round, round_time): - """ + def set_session_config(self, id, config): + self.sessions.update_one({'session_id': str(id)}, { + '$push': {'session_config': config}}, True) - :param round: - :param round_time: - """ - self.combiner_round_time.update_one({'key': 'combiner_round_time'}, { - '$push': {'round': round}}, True) - self.combiner_round_time.update_one({'key': 'combiner_round_time'}, { - '$push': {'round_time': round_time}}, True) - - # def set_combiner_queue_length(self,timestamp,ql): - # self.combiner_queue_length({'key': 'combiner_queue_length'}, {'$push': {'queue_length': ql}}, True) - # self.combiner_queue_length.update({'key': 'combiner_queue_length'}, {'$push': {'timestamp': timestamp}}, True) - - # Round statistics - def set_round_meta(self, round_meta): + def set_round_combiner_data(self, data): """ :param round_meta: """ - self.round.update_one({'key': str(round_meta['round_id'])}, { - '$push': {'combiners': round_meta}}, True) + self.rounds.update_one({'round_id': str(data['round_id'])}, { + '$push': {'combiners': data}}, True) - def set_round_meta_reducer(self, round_meta): + def set_round_data(self, round_data): """ :param round_meta: """ - self.round.update_one({'key': str(round_meta['round_id'])}, { - '$push': {'reducer': round_meta}}, True) - - def get_latest_round(self): - """ - - :return: - """ - for post in self.round_time.find({'key': 'round_time'}): - last_round = post['round'][-1] - return last_round - - def ps_util_monitor(self, round=None): - """ - - :param round: - """ - global running - running = True - currentProcess = psutil.Process() - # start loop - while running: - cpu_percents = currentProcess.cpu_percent(interval=1) - mem_percents = currentProcess.memory_percent() - ps_time = str(datetime.now()) 
- - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'cpu': cpu_percents}}, True) - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'mem': mem_percents}}, True) - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'time': ps_time}}, True) - self.psutil_monitoring.update_one({'key': 'cpu_mem_usage'}, { - '$push': {'round': round}}, True) - - def start_monitor(self, round=None): - """ - - :param round: - """ - global t - # create thread and start it - t = threading.Thread(target=self.ps_util_monitor, args=[round]) - t.start() - - def stop_monitor(self): - """ - - """ - global running - global t - # use `running` to stop loop in thread so thread will end - running = False - # wait for thread's end - t.join() + self.rounds.update_one({'round_id': str(round_data['round_id'])}, { + '$push': {'reducer': round_data}}, True) diff --git a/fedn/fedn/common/tracer/tracer.py b/fedn/fedn/common/tracer/tracer.py index c5381ff81..95b5fec59 100644 --- a/fedn/fedn/common/tracer/tracer.py +++ b/fedn/fedn/common/tracer/tracer.py @@ -3,7 +3,7 @@ class Tracer(ABC): @abstractmethod - def report(self, msg): + def report_status(self, msg): """ :param msg: diff --git a/fedn/fedn/clients/__init__.py b/fedn/fedn/network/__init__.py similarity index 100% rename from fedn/fedn/clients/__init__.py rename to fedn/fedn/network/__init__.py diff --git a/fedn/fedn/aggregators/__init__.py b/fedn/fedn/network/clients/__init__.py similarity index 100% rename from fedn/fedn/aggregators/__init__.py rename to fedn/fedn/network/clients/__init__.py diff --git a/fedn/fedn/client.py b/fedn/fedn/network/clients/client.py similarity index 83% rename from fedn/fedn/client.py rename to fedn/fedn/network/clients/client.py index f1abfc3db..b2d358112 100644 --- a/fedn/fedn/client.py +++ b/fedn/fedn/network/clients/client.py @@ -20,10 +20,10 @@ import fedn.common.net.grpc.fedn_pb2 as fedn import fedn.common.net.grpc.fedn_pb2_grpc as rpc -from fedn.clients.client.state import ClientState, ClientStateToString from fedn.common.control.package import PackageRuntime from fedn.common.net.connect import ConnectorClient, Status from fedn.common.net.web.client import page, style +from fedn.network.clients.state import ClientState, ClientStateToString from fedn.utils.dispatcher import Dispatcher from fedn.utils.helpers import get_helper from fedn.utils.logger import Logger @@ -54,13 +54,12 @@ class Client: """ def __init__(self, config): - """ - Parameters - ---------- - config: dict - A configuration dictionary containing connection information for - the discovery service (controller) and settings governing e.g. - client-combiner assignment behavior. + """Initialize the client. + + :param config: A configuration dictionary containing connection information for + the discovery service (controller) and settings governing e.g. + client-combiner assignment behavior. + :type config: dict """ self.state = None @@ -111,6 +110,7 @@ def __init__(self, config): self.state = ClientState.idle def _detach(self): + """Detach from the FEDn network (disconnect from combiner)""" # Setting _attached to False will make all processing threads return if not self._attached: print("Client is not attached.", flush=True) @@ -120,7 +120,7 @@ def _detach(self): self._disconnect() def _attach(self): - """ """ + """Attach to the FEDn network (connect to combiner)""" # Ask controller for a combiner and connect to that combiner. if self._attached: print("Client is already attached. 
", flush=True) @@ -134,6 +134,13 @@ def _attach(self): return client_config def _initialize_helper(self, client_config): + """Initialize the helper class for the client. + + :param client_config: A configuration dictionary containing connection information for + the discovery service (controller) and settings governing e.g. + client-combiner assignment behavior. + :type client_config: dict + """ if 'model_type' in client_config.keys(): self.helper = get_helper(client_config['model_type']) @@ -141,6 +148,10 @@ def _initialize_helper(self, client_config): def _subscribe_to_combiner(self, config): """Listen to combiner message stream and start all processing threads. + :param config: A configuration dictionary containing connection information for + the discovery service (controller) and settings governing e.g. + client-combiner assignment behavior. + """ # Start sending heartbeats to the combiner. @@ -160,7 +171,14 @@ def _subscribe_to_combiner(self, config): threading.Thread(target=self.process_request, daemon=True).start() def _initialize_dispatcher(self, config): - """ """ + """ Initialize the dispatcher for the client. + + :param config: A configuration dictionary containing connection information for + the discovery service (controller) and settings governing e.g. + client-combiner assignment behavior. + :type config: dict + + """ if config['remote_compute_context']: pr = PackageRuntime(os.getcwd(), os.getcwd()) @@ -215,7 +233,11 @@ def _initialize_dispatcher(self, config): self.dispatcher = Dispatcher(dispatch_config, self.run_path) def _assign(self): - """Contacts the controller and asks for combiner assignment. """ + """Contacts the controller and asks for combiner assignment. + + :return: A configuration dictionary containing connection information for combiner. + :rtype: dict + """ print("Asking for assignment!", flush=True) while True: @@ -242,12 +264,9 @@ def _assign(self): def _connect(self, client_config): """Connect to assigned combiner. - Parameters - ---------- - client_config : dict - A dictionary with connection information and settings - for the assigned combiner. - + :param client_config: A configuration dictionary containing connection information for + the combiner. + :type client_config: dict """ # TODO use the client_config['certificate'] for setting up secure comms' @@ -313,13 +332,12 @@ def _disconnect(self): def get_model(self, id): """Fetch a model from the assigned combiner. + Downloads the model update object via a gRPC streaming channel, Download. - Downloads the model update object via a gRPC streaming channel, Dowload. - - Parameters - ---------- - id : str - The id of the model update object. + :param id: The id of the model update object. + :type id: str + :return: The model update object. + :rtype: BytesIO """ data = BytesIO() @@ -342,12 +360,12 @@ def set_model(self, model, id): Uploads the model updated object via a gRPC streaming channel, Upload. - Parameters - ---------- - model : BytesIO, object - The model update object. - id : str - The id of the model update object. + :param model: The model update object. + :type model: BytesIO + :param id: The id of the model update object. + :type id: str + :return: The model update object. + :rtype: BytesIO """ if not isinstance(model, BytesIO): bt = BytesIO() @@ -360,9 +378,12 @@ def set_model(self, model, id): bt.seek(0, 0) def upload_request_generator(mdl): - """ + """Generator function for model upload requests. - :param mdl: + :param mdl: The model update object. 
+ :type mdl: BytesIO + :return: A model update request. + :rtype: fedn.ModelRequest """ while True: b = mdl.read(CHUNK_SIZE) @@ -382,7 +403,11 @@ def upload_request_generator(mdl): return result def _listen_to_model_update_request_stream(self): - """Subscribe to the model update request stream. """ + """Subscribe to the model update request stream. + + :return: None + :rtype: None + """ r = fedn.ClientAvailableMessage() r.sender.name = self.name @@ -406,8 +431,6 @@ def _listen_to_model_update_request_stream(self): except grpc.RpcError: # TODO: make configurable timeout = 5 - # print("CLIENT __listen_to_model_update_request_stream: GRPC ERROR {} retrying in {}..".format( - # status_code.name, timeout), flush=True) time.sleep(timeout) except Exception: raise @@ -416,7 +439,12 @@ def _listen_to_model_update_request_stream(self): return def _listen_to_model_validation_request_stream(self): - """Subscribe to the model validation request stream. """ + """Subscribe to the model validation request stream. + + + :return: None + :rtype: None + """ r = fedn.ClientAvailableMessage() r.sender.name = self.name @@ -433,8 +461,6 @@ def _listen_to_model_validation_request_stream(self): except grpc.RpcError: # TODO: make configurable timeout = 5 - # print("CLIENT __listen_to_model_validation_request_stream: GRPC ERROR {} retrying in {}..".format( - # status_code.name, timeout), flush=True) time.sleep(timeout) except Exception: raise @@ -459,9 +485,10 @@ def process_request(self): request.model_id) processing_time = time.time()-tic meta['processing_time'] = processing_time + meta['config'] = request.data if model_id is not None: - # Notify the combiner that a model update is available + # Send model update to combiner update = fedn.ModelUpdate() update.sender.name = self.name update.sender.role = fedn.WORKER @@ -487,7 +514,7 @@ def process_request(self): elif task_type == 'validate': self.state = ClientState.validating metrics = self._process_validation_request( - request.model_id) + request.model_id, request.is_inference) if metrics is not None: # Send validation @@ -503,8 +530,15 @@ def process_request(self): validation.correlation_id = request.correlation_id _ = self.orchestrator.SendModelValidation( validation) + + # Set status type + if request.is_inference: + status_type = fedn.StatusType.INFERENCE + else: + status_type = fedn.StatusType.MODEL_VALIDATION + self._send_status("Model validation completed.", log_level=fedn.Status.AUDIT, - type=fedn.StatusType.MODEL_VALIDATION, request=validation) + type=status_type, request=validation) else: self._send_status("Client {} failed to complete model validation.".format(self.name), log_level=fedn.Status.WARNING, request=request) @@ -517,10 +551,10 @@ def process_request(self): def _process_training_request(self, model_id): """Process a training (model update) request. - Parameters - ---------- - model_id : Str - The id of the model to update. + :param model_id: The model id of the model to be updated. + :type model_id: str + :return: The model id of the updated model, or None if the update failed. And a dict with metadata. 
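# upload_request_generator above streams a model to the combiner in CHUNK_SIZE
# pieces over the gRPC Upload channel. A standalone sketch of just the chunking
# logic, assuming plain tuples in place of the real fedn.ModelRequest messages
# and a made-up 1 MiB chunk size.
from io import BytesIO

CHUNK_SIZE = 1024 * 1024


def chunk_requests(mdl, model_id):
    """Yield (model_id, data, status) tuples until the buffer is exhausted."""
    while True:
        b = mdl.read(CHUNK_SIZE)
        if b:
            yield (model_id, b, 'IN_PROGRESS')
        else:
            # An empty read marks the end of the stream.
            yield (model_id, b, 'OK')
            break


if __name__ == '__main__':
    payload = BytesIO(b'x' * (2 * CHUNK_SIZE + 10))
    statuses = [status for _, _, status in chunk_requests(payload, 'model_id1')]
    print(statuses)  # ['IN_PROGRESS', 'IN_PROGRESS', 'IN_PROGRESS', 'OK']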
+ :rtype: tuple """ @@ -554,8 +588,14 @@ def _process_training_request(self, model_id): self.set_model(out_model, str(updated_model_id)) meta['upload_model'] = time.time() - tic + # Read the metadata file + with open(outpath+'-metadata', 'r') as fh: + training_metadata = json.loads(fh.read()) + meta['training_metadata'] = training_metadata + os.unlink(inpath) os.unlink(outpath) + os.unlink(outpath+'-metadata') except Exception as e: print("ERROR could not process training request due to error: {}".format( @@ -567,9 +607,24 @@ def _process_training_request(self, model_id): return updated_model_id, meta - def _process_validation_request(self, model_id): + def _process_validation_request(self, model_id, is_inference): + """Process a validation request. + + :param model_id: The model id of the model to be validated. + :type model_id: str + :param is_inference: True if the validation is an inference request, False if it is a validation request. + :type is_inference: bool + :return: The validation metrics, or None if validation failed. + :rtype: dict + """ + # Figure out cmd + if is_inference: + cmd = 'infer' + else: + cmd = 'validate' + self._send_status( - "Processing validation request for model_id {}".format(model_id)) + f"Processing {cmd} request for model_id {model_id}") self.state = ClientState.validating try: model = self.get_model(str(model_id)) @@ -579,7 +634,7 @@ def _process_validation_request(self, model_id): fh.write(model.getbuffer()) _, outpath = tempfile.mkstemp() - self.dispatcher.run_cmd("validate {} {}".format(inpath, outpath)) + self.dispatcher.run_cmd(f"{cmd} {inpath} {outpath}") with open(outpath, "r") as fh: validation = json.loads(fh.read()) @@ -597,9 +652,7 @@ def _process_validation_request(self, model_id): return validation def _handle_combiner_failure(self): - """ Register failed combiner connection. - - """ + """ Register failed combiner connection.""" self._missed_heartbeat += 1 if self._missed_heartbeat > self.config['reconnect_after_missed_heartbeat']: self._detach() @@ -607,11 +660,10 @@ def _handle_combiner_failure(self): def _send_heartbeat(self, update_frequency=2.0): """Send a heartbeat to the combiner. - Parameters - ---------- - update_frequency : float - The interval in seconds between heartbeat messages. - + :param update_frequency: The frequency of the heartbeat in seconds. + :type update_frequency: float + :return: None if the client is detached. + :rtype: None """ while True: @@ -631,7 +683,17 @@ def _send_heartbeat(self, update_frequency=2.0): return def _send_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None): - """Send status message. """ + """Send status message. + + :param msg: The message to send. + :type msg: str + :param log_level: The log level of the message. + :type log_level: fedn.Status.INFO, fedn.Status.WARNING, fedn.Status.ERROR + :param type: The type of the message. + :type type: str + :param request: The request message. + :type request: fedn.Request + """ status = fedn.Status() status.timestamp = str(datetime.now()) status.sender.name = self.name @@ -652,7 +714,7 @@ def _send_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None) def run_web(self): """Starts a local logging UI (Flask app) serving on port 8080. - Currently not in use as default. + Currently not in use. """ app = Flask(__name__) @@ -676,7 +738,7 @@ def index(): sys.stdout = self._original_stdout def run(self): - """ Main run loop. """ + """ Run the client. 
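# _process_training_request above now reads a JSON sidecar at outpath + '-metadata'
# and forwards it as meta['training_metadata']; the aggregator further down rejects
# updates that lack 'num_examples'. A sketch of what a client-side training
# entrypoint could write; every field other than num_examples is illustrative only.
import json
import os
import tempfile


def write_training_metadata(outpath, num_examples, epochs=1, batch_size=32):
    metadata = {
        'num_examples': num_examples,   # required by the aggregator's metadata check
        'epochs': epochs,               # illustrative extras, not required by the diff
        'batch_size': batch_size,
    }
    with open(outpath + '-metadata', 'w') as fh:
        fh.write(json.dumps(metadata))


if __name__ == '__main__':
    fd, outpath = tempfile.mkstemp()
    os.close(fd)
    write_training_metadata(outpath, num_examples=600)
    with open(outpath + '-metadata', 'r') as fh:
        print(json.loads(fh.read()))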
""" try: cnt = 0 old_state = self.state diff --git a/fedn/fedn/clients/client/state.py b/fedn/fedn/network/clients/state.py similarity index 60% rename from fedn/fedn/clients/client/state.py rename to fedn/fedn/network/clients/state.py index bb30e41a2..2afd85115 100644 --- a/fedn/fedn/clients/client/state.py +++ b/fedn/fedn/network/clients/state.py @@ -8,10 +8,12 @@ class ClientState(Enum): def ClientStateToString(state): - """ + """ Convert a ClientState to a string representation. - :param state: - :return: + :param state: the state to convert + :type state: :class:`fedn.network.clients.state.ClientState` + :return: string representation of the state + :rtype: str """ if state == ClientState.idle: return "IDLE" diff --git a/fedn/fedn/clients/client/__init__.py b/fedn/fedn/network/combiner/__init__.py similarity index 100% rename from fedn/fedn/clients/client/__init__.py rename to fedn/fedn/network/combiner/__init__.py diff --git a/fedn/fedn/clients/combiner/__init__.py b/fedn/fedn/network/combiner/aggregators/__init__.py similarity index 100% rename from fedn/fedn/clients/combiner/__init__.py rename to fedn/fedn/network/combiner/aggregators/__init__.py diff --git a/fedn/fedn/network/combiner/aggregators/aggregatorbase.py b/fedn/fedn/network/combiner/aggregators/aggregatorbase.py new file mode 100644 index 000000000..e075f7142 --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/aggregatorbase.py @@ -0,0 +1,138 @@ +import importlib +import json +import queue +from abc import ABC, abstractmethod + +import fedn.common.net.grpc.fedn_pb2 as fedn + +AGGREGATOR_PLUGIN_PATH = "fedn.network.combiner.aggregators.{}" + + +class AggregatorBase(ABC): + """ Abstract class defining an aggregator. """ + + @abstractmethod + def __init__(self, storage, server, modelservice, control): + """ Initialize the aggregator. + + :param id: A reference to id of :class: `fedn.network.combiner.Combiner` + :type id: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + :param control: A handle to the :class: `fedn.network.combiner.round.RoundController` + :type control: class: `fedn.network.combiner.round.RoundController` + """ + self.name = self.__class__.__name__ + self.storage = storage + self.server = server + self.modelservice = modelservice + self.control = control + self.model_updates = queue.Queue() + + @abstractmethod + def combine_models(self, nr_expected_models=None, nr_required_models=1, helper=None, timeout=180, delete_models=True): + """Routine for combining model updates. Implemented in subclass. + + :param nr_expected_models: Number of expected models. If None, wait for all models. + :type nr_expected_models: int + :param nr_required_models: Number of required models to combine. + :type nr_required_models: int + :param helper: A helper object. + :type helper: :class: `fedn.utils.plugins.helperbase.HelperBase` + :param timeout: Timeout in seconds to wait for models to be combined. + :type timeout: int + :param delete_models: Delete client models after combining. + :type delete_models: bool + :return: A combined model. 
+ """ + pass + + def on_model_update(self, model_update): + """Callback when a new client model update is received. + Performs (optional) pre-processing and then puts the update id + on the aggregation queue. Override in subclass as needed. + + :param model_update: A ModelUpdate message. + :type model_update: fedn.ModelUpdate + """ + try: + self.server.report_status("AGGREGATOR({}): callback received model update {}".format(self.name, model_update.model_update_id), + log_level=fedn.Status.INFO) + + # Validate the update and metadata + valid_update = self._validate_model_update(model_update) + if valid_update: + # Push the model update to the processing queue + self.model_updates.put(model_update) + else: + self.server.report_status("AGGREGATOR({}): Invalid model update, skipping.".format(self.name)) + except Exception as e: + self.server.report_status("AGGREGATOR({}): Failed to receive model update! {}".format(self.name, e), + log_level=fedn.Status.WARNING) + pass + + def _validate_model_update(self, model_update): + """ Validate the model update. + + :param model_update: A ModelUpdate message. + :type model_update: object + :return: True if the model update is valid, False otherwise. + :rtype: bool + """ + # TODO: Validate the metadata to check that it contains all variables assumed by the aggregator. + data = json.loads(model_update.meta)['training_metadata'] + if 'num_examples' not in data.keys(): + self.server.report_status("AGGREGATOR({}): Model validation failed, num_examples missing in metadata.".format(self.name)) + return False + return True + + def next_model_update(self, helper): + """ Get the next model update from the queue. + + :param helper: A helper object. + :type helper: object + :return: A tuple containing the model update, metadata and model id. + :rtype: tuple + """ + model_update = self.model_updates.get(block=False) + model_id = model_update.model_update_id + model_next = self.control.load_model_update(helper, model_id) + # Get relevant metadata + data = json.loads(model_update.meta)['training_metadata'] + config = json.loads(json.loads(model_update.meta)['config']) + data['round_id'] = config['round_id'] + + return model_next, data, model_id + + def get_state(self): + """ Get the state of the aggregator's queue, including the number of model updates.""" + state = { + 'queue_len': self.model_updates.qsize() + + } + return state + + +def get_aggregator(aggregator_module_name, storage, server, modelservice, control): + """ Return an instance of the aggregator class. + + :param aggregator_module_name: The name of the aggregator plugin module. + :type aggregator_module_name: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + :param control: A handle to the :class: `fedn.network.combiner.round.RoundController` + :type control: class: `fedn.network.combiner.round.RoundController` + :return: An aggregator instance.
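# get_aggregator resolves the aggregator plugin from a module-path template via
# importlib (the import_module call follows just below). A standalone illustration
# of the mechanism, using a stdlib module so it runs without FEDn installed; only
# the AGGREGATOR_PLUGIN_PATH template mentioned in the comment is FEDn's.
import importlib

PLUGIN_PATH = "{}"  # FEDn's template is "fedn.network.combiner.aggregators.{}"


def get_plugin_attr(module_name, attr):
    """Import the module named by the template and return one of its attributes."""
    module = importlib.import_module(PLUGIN_PATH.format(module_name))
    return getattr(module, attr)


if __name__ == '__main__':
    # Loads the stdlib 'json' module dynamically and uses its 'dumps' attribute,
    # mirroring how the combiner loads Aggregator from e.g. the 'fedavg' module.
    dumps = get_plugin_attr('json', 'dumps')
    print(dumps({'aggregator': 'fedavg'}))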
+ :rtype: class: `fedn.combiner.aggregators.AggregatorBase` + """ + aggregator_plugin = AGGREGATOR_PLUGIN_PATH.format(aggregator_module_name) + aggregator = importlib.import_module(aggregator_plugin) + return aggregator.Aggregator(storage, server, modelservice, control) diff --git a/fedn/fedn/network/combiner/aggregators/fedavg.py b/fedn/fedn/network/combiner/aggregators/fedavg.py new file mode 100644 index 000000000..0cd15b66a --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/fedavg.py @@ -0,0 +1,88 @@ +import fedn.common.net.grpc.fedn_pb2 as fedn +from fedn.network.combiner.aggregators.aggregatorbase import AggregatorBase + + +class Aggregator(AggregatorBase): + """ Local SGD / Federated Averaging (FedAvg) aggregator. Computes a weighted mean + of parameter updates. + + :param id: A reference to id of :class: `fedn.network.combiner.Combiner` + :type id: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + :param control: A handle to the :class: `fedn.network.combiner.round.RoundController` + :type control: class: `fedn.network.combiner.round.RoundController` + + """ + + def __init__(self, storage, server, modelservice, control): + """Constructor method""" + + super().__init__(storage, server, modelservice, control) + + self.name = "fedavg" + + def combine_models(self, helper=None, time_window=180, max_nr_models=100, delete_models=True): + """Aggregate model updates in the queue by computing an incremental + weighted average of parameters. + + :param helper: An instance of :class: `fedn.utils.helpers.HelperBase`, ML framework specific helper, defaults to None + :type helper: class: `fedn.utils.helpers.HelperBase`, optional + :param time_window: The time window for model aggregation, defaults to 180 + :type time_window: int, optional + :param max_nr_models: The maximum number of updates aggregated, defaults to 100 + :type max_nr_models: int, optional + :param delete_models: Delete models from storage after aggregation, defaults to True + :type delete_models: bool, optional + :return: The global model and metadata + :rtype: tuple + """ + + data = {} + data['time_model_load'] = 0.0 + data['time_model_aggregation'] = 0.0 + + model = None + nr_aggregated_models = 0 + total_examples = 0 + + self.server.report_status( + "AGGREGATOR({}): Aggregating model updates... 
".format(self.name)) + + while not self.model_updates.empty(): + try: + # Get next model from queue + model_next, metadata, model_id = self.next_model_update(helper) + self.server.report_status( + "AGGREGATOR({}): Processing model update {}, metadata: {} ".format(self.name, model_id, metadata)) + + # Increment total number of examples + total_examples += metadata['num_examples'] + + if nr_aggregated_models == 0: + model = model_next + else: + model = helper.increment_average( + model, model_next, metadata['num_examples'], total_examples) + + nr_aggregated_models += 1 + # Delete model from storage + if delete_models: + self.modelservice.models.delete(model_id) + self.server.report_status( + "AGGREGATOR({}): Deleted model update {} from storage.".format(self.name, model_id)) + self.model_updates.task_done() + except Exception as e: + self.server.report_status( + "AGGREGATOR({}): Error encoutered while processing model update {}, skipping this update.".format(self.name, e)) + self.model_updates.task_done() + + data['nr_aggregated_models'] = nr_aggregated_models + + self.server.report_status("AGGREGATOR({}): Aggregation completed, aggregated {} models.".format(self.name, nr_aggregated_models), + log_level=fedn.Status.INFO) + return model, data diff --git a/fedn/fedn/network/combiner/aggregators/tests/test_fedavg.py b/fedn/fedn/network/combiner/aggregators/tests/test_fedavg.py new file mode 100644 index 000000000..55e5052b8 --- /dev/null +++ b/fedn/fedn/network/combiner/aggregators/tests/test_fedavg.py @@ -0,0 +1,33 @@ +import unittest +from unittest.mock import MagicMock + +from fedn.network.combiner.aggregators.fedavg import FedAvg + + +class TestFedAvg(unittest.TestCase): + """Test the FedAvg aggregator""" + + def SetUp(self): + pass + + def test_fedavg_init(self, *args, **kwargs): + """Test the FedAvg aggregator constructor""" + aggregator = FedAvg("id", None, None, None, None) + self.assertEqual(aggregator.name, "FedAvg") + + def test_fedavg_combine_models(self, *args, **kwargs): + """Test the FedAvg aggregator combine_models method with mock classes and methods""" + aggregator = FedAvg("id", None, None, None, None) + aggregator.next_model_update = MagicMock(return_value=(None, None, None)) + aggregator.server = MagicMock() + + data = {} + data['time_model_load'] = 0.0 + data['time_model_aggregation'] = 0.0 + data['nr_aggregated_models'] = 0 + + self.assertEqual(aggregator.combine_models(), (None, data)) + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/clients/reducer/interfaces.py b/fedn/fedn/network/combiner/interfaces.py similarity index 58% rename from fedn/fedn/clients/reducer/interfaces.py rename to fedn/fedn/network/combiner/interfaces.py index a307fe446..d70fc80d0 100644 --- a/fedn/fedn/clients/reducer/interfaces.py +++ b/fedn/fedn/network/combiner/interfaces.py @@ -14,11 +14,20 @@ class CombinerUnavailableError(Exception): class Channel: - """ + """ Wrapper for a gRPC channel. """ - """ + def __init__(self, address, port, certificate=None): + """ Create a channel. - def __init__(self, address, port, certificate): + If a valid certificate is given, a secure channel is created, else insecure. + + :parameter address: The address for the gRPC server. + :type address: str + :parameter port: The port for connecting to the gRPC server. 
+ :type port: int + :parameter certificate: The certificate for connecting to the gRPC server (optional) + :type certificate: str + """ self.address = address self.port = port self.certificate = certificate @@ -33,19 +42,42 @@ def __init__(self, address, port, certificate): '{}:{}'.format(self.address, str(self.port))) def get_channel(self): - """ + """ Get a channel. - :return: + :return: An instance of a gRPC channel + :rtype: :class:`grpc.Channel` """ return copy.copy(self.channel) class CombinerInterface: - """ + """ Interface for the Combiner (aggregation server). + Abstraction on top of the gRPC server servicer. """ def __init__(self, parent, name, address, fqdn, port, certificate=None, key=None, ip=None, config=None): + """ Initialize the combiner interface. + + :parameter parent: The parent combiner. + :type parent: :class:`fedn.network.combiner.Combiner` + :parameter name: The name of the combiner. + :type name: str + :parameter address: The address of the combiner. + :type address: str + :parameter fqdn: The fully qualified domain name of the combiner. + :type fqdn: str + :parameter port: The port of the combiner. + :type port: int + :parameter certificate: The certificate of the combiner (optional). + :type certificate: str + :parameter key: The key of the combiner (optional). + :type key: str + :parameter ip: The ip of the combiner (optional). + :type ip: str + :parameter config: The configuration of the combiner (optional). + :type config: dict + """ self.parent = parent self.name = name self.address = address @@ -62,22 +94,22 @@ def __init__(self, parent, name, address, fqdn, port, certificate=None, key=None else: self.config = config - @classmethod - def from_statestore(statestore, name): - """ """ - @classmethod def from_json(combiner_config): - """ + """ Initialize the combiner config from a json document. - :return: + :parameter combiner_config: The combiner configuration. + :type combiner_config: dict + :return: An instance of the combiner interface. + :rtype: :class:`fedn.network.combiner.interfaces.CombinerInterface` """ return CombinerInterface(**combiner_config) def to_dict(self): - """ + """ Export combiner configuration to a dictionary. - :return: + : return: A dictionary with the combiner configuration. + : rtype: dict """ data = { @@ -88,7 +120,8 @@ def to_dict(self): 'port': self.port, 'ip': self.ip, 'certificate': None, - 'key': None + 'key': None, + 'config': self.config } if self.certificate: @@ -105,20 +138,45 @@ def to_dict(self): return data def to_json(self): - """ + """ Export combiner configuration to json. - :return: + :return: A json document with the combiner configuration. + :rtype: str """ return json.dumps(self.to_dict()) - def report(self, config=None): + def get_certificate(self): + """ Get combiner certificate. + + :return: The combiner certificate. + :rtype: str, None if no certificate is set. """ + if self.certificate: + cert_b64 = base64.b64encode(self.certificate) + return str(cert_b64).split('\'')[1] + else: + return None + + def get_key(self): + """ Get combiner key. + + :return: The combiner key. + :rtype: str, None if no key is set. + """ + if self.key: + key_b64 = base64.b64encode(self.key) + return str(key_b64).split('\'')[1] + else: + return None + + def report(self): + """ Recieve a status report from the combiner. - :param config: - :return: + :return: A dictionary describing the combiner state. + :rtype: dict + + :raises CombinerUnavailableError: If the combiner is unavailable. 
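# The FedAvg combine_models loop above (fedavg.py) folds each client update into a
# running weighted mean via helper.increment_average(model, model_next, num_examples,
# total_examples). A numpy sketch of arithmetic consistent with that call signature;
# this is a stand-in, FEDn's actual helper implementation is not reproduced here.
import numpy as np


def increment_average(model, model_next, num_examples, total_examples):
    """Incremental weighted mean; total_examples already includes num_examples."""
    w = num_examples / total_examples
    return [m + w * (m_next - m) for m, m_next in zip(model, model_next)]


if __name__ == '__main__':
    updates = [([np.ones(3) * v], n) for v, n in [(1.0, 10), (2.0, 30), (4.0, 60)]]
    model, total = None, 0
    for params, n in updates:
        total += n
        model = params if model is None else increment_average(model, params, n, total)
    # Direct weighted mean: (1*10 + 2*30 + 4*60) / 100 = 3.1
    print(model[0])  # [3.1 3.1 3.1]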
""" - print(f"Trying to create Report channel to gRPC server at: address {self.address} port {self.port}", flush=True) - print(f"Certificate: {self.certificate}", flush=True) channel = Channel(self.address, self.port, self.certificate).get_channel() control = rpc.ControlStub(channel) @@ -136,9 +194,11 @@ def report(self, config=None): raise def configure(self, config=None): - """ + """ Configure the combiner. + Set the parameters in config at the server. - :param config: + :param config: A dictionary containing parameters. + :type config: dict """ if not config: config = self.config @@ -160,83 +220,65 @@ def configure(self, config=None): else: raise - def start(self, config): - """ + def flush_model_update_queue(self): + """ Reset the model update queue on the combiner. """ - :param config: - :return: - """ channel = Channel(self.address, self.port, self.certificate).get_channel() control = rpc.ControlStub(channel) + request = fedn.ControlRequest() - request.command = fedn.Command.START - for k, v in config.items(): - p = request.parameter.add() - p.key = str(k) - p.value = str(v) try: - response = control.Start(request) + control.FlushAggregationQueue(request) except grpc.RpcError as e: if e.code() == grpc.StatusCode.UNAVAILABLE: raise CombinerUnavailableError else: raise - print("Response from combiner {}".format(response.message)) - return response - - def set_model_id(self, model_id): - """ + def submit(self, config): + """ Submit a compute plan to the combiner. - :param model_id: + :param config: The job configuration. + :type config: dict + :return: Server ControlResponse object. + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` """ channel = Channel(self.address, self.port, self.certificate).get_channel() control = rpc.ControlStub(channel) request = fedn.ControlRequest() - p = request.parameter.add() - p.key = 'model_id' - p.value = str(model_id) + request.command = fedn.Command.START + for k, v in config.items(): + p = request.parameter.add() + p.key = str(k) + p.value = str(v) try: - control.Configure(request) + response = control.Start(request) except grpc.RpcError as e: if e.code() == grpc.StatusCode.UNAVAILABLE: raise CombinerUnavailableError else: raise - def get_model_id(self): - """ + return response - :return: - """ - channel = Channel(self.address, self.port, - self.certificate).get_channel() - reducer = rpc.ReducerStub(channel) - request = fedn.GetGlobalModelRequest() - try: - response = reducer.GetGlobalModel(request) - except grpc.RpcError as e: - if e.code() == grpc.StatusCode.UNAVAILABLE: - raise CombinerUnavailableError - else: - raise + def get_model(self, id): + """ Download a model from the combiner server. - return response.model_id - def get_model(self, id=None): - """ Retrive the model bundle from a combiner. """ + :param id: The model id. + :type id: str + :return: A file-like object containing the model. + :rtype: :class:`io.BytesIO`, None if the model is not available. + """ channel = Channel(self.address, self.port, self.certificate).get_channel() modelservice = rpc.ModelServiceStub(channel) - if not id: - id = self.get_model_id() - data = BytesIO() data.seek(0, 0) @@ -250,9 +292,10 @@ def get_model(self, id=None): return None def allowing_clients(self): - """ + """ Check if the combiner is allowing additional client connections. - :return: + :return: True if accepting, else False. 
+ :rtype: bool """ channel = Channel(self.address, self.port, self.certificate).get_channel() diff --git a/fedn/fedn/clients/combiner/modelservice.py b/fedn/fedn/network/combiner/modelservice.py similarity index 54% rename from fedn/fedn/clients/combiner/modelservice.py rename to fedn/fedn/network/combiner/modelservice.py index abdb80d6a..7d29d0d00 100644 --- a/fedn/fedn/clients/combiner/modelservice.py +++ b/fedn/fedn/network/combiner/modelservice.py @@ -1,3 +1,5 @@ +import os +import tempfile from io import BytesIO import fedn.common.net.grpc.fedn_pb2 as fedn @@ -8,28 +10,71 @@ class ModelService(rpc.ModelServiceServicer): - """ + """ Service for handling download and upload of models to the server. """ def __init__(self): self.models = TempModelStorage() - # self.models = defaultdict(io.BytesIO) - # self.models_metadata = {} def exist(self, model_id): - """ + """ Check if a model exists on the server. - :param model_id: - :return: + :param model_id: The model id. + :return: True if the model exists, else False. """ return self.models.exist(model_id) - def get_model(self, id): + def get_tmp_path(self): + """ Return a temporary output path compatible with save_model, load_model. """ + fd, path = tempfile.mkstemp() + os.close(fd) + return path + + def load_model_from_BytesIO(self, model_bytesio, helper): + """ Load a model from a BytesIO object. + + :param model_bytesio: A BytesIO object containing the model. + :type model_bytesio: :class:`io.BytesIO` + :param helper: The helper object for the model. + :type helper: :class:`fedn.utils.helperbase.HelperBase` + :return: The model object. + :rtype: return type of helper.load """ + path = self.get_tmp_path() + with open(path, 'wb') as fh: + fh.write(model_bytesio) + fh.flush() + model = helper.load(path) + os.unlink(path) + return model + + def serialize_model_to_BytesIO(self, model, helper): + """ Serialize a model to a BytesIO object. + + :param model: The model object. + :type model: return type of helper.load + :param helper: The helper object for the model. + :type helper: :class:`fedn.utils.helperbase.HelperBase` + :return: A BytesIO object containing the model. + :rtype: :class:`io.BytesIO` + """ + outfile_name = helper.save(model) + + a = BytesIO() + a.seek(0, 0) + with open(outfile_name, 'rb') as f: + a.write(f.read()) + os.unlink(outfile_name) + return a + + def get_model(self, id): + """ Download model with id 'id' from server. - :param id: - :return: + :param id: The model id. + :type id: str + :return: A BytesIO object containing the model. + :rtype: :class:`io.BytesIO`, None if model does not exist. """ data = BytesIO() @@ -46,12 +91,13 @@ def get_model(self, id): return None def set_model(self, model, id): - """ + """ Upload model to server. - :param model: - :param id: + :param model: A model object (BytesIO) + :type model: :class:`io.BytesIO` + :param id: The model id. + :type id: str """ - if not isinstance(model, BytesIO): bt = BytesIO() @@ -86,13 +132,16 @@ def upload_request_generator(mdl): # Model Service def Upload(self, request_iterator, context): + """ RPC endpoints for uploading a model. + + :param request_iterator: The model request iterator. + :type request_iterator: :class:`fedn.common.net.grpc.fedn_pb2.ModelRequest` + :param context: The context object (unused) + :type context: :class:`grpc._server._Context` + :return: A model response object. 
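# serialize_model_to_BytesIO and load_model_from_BytesIO above round-trip a model
# through a temporary file using the framework helper's save()/load(). A sketch with
# a minimal numpy-backed stand-in helper; the real helpers live in fedn.utils and
# are not reproduced here.
import os
import tempfile
from io import BytesIO

import numpy as np


class NumpyHelper:
    """Stand-in helper: a 'model' is a list of numpy arrays."""

    def save(self, model, path=None):
        if path is None:
            fd, path = tempfile.mkstemp(suffix='.npz')
            os.close(fd)
        np.savez(path, *model)
        return path

    def load(self, path):
        with np.load(path) as data:
            return [data[key] for key in sorted(data.files)]


def serialize_model_to_BytesIO(model, helper):
    outfile_name = helper.save(model)
    a = BytesIO()
    with open(outfile_name, 'rb') as f:
        a.write(f.read())
    os.unlink(outfile_name)
    a.seek(0)
    return a


def load_model_from_BytesIO(model_bytesio, helper):
    fd, path = tempfile.mkstemp()
    os.close(fd)
    with open(path, 'wb') as fh:
        fh.write(model_bytesio)
    model = helper.load(path)
    os.unlink(path)
    return model


if __name__ == '__main__':
    model = [np.arange(4.0), np.ones((2, 2))]
    buf = serialize_model_to_BytesIO(model, NumpyHelper())
    restored = load_model_from_BytesIO(buf.getbuffer(), NumpyHelper())
    print([a.shape for a in restored])  # [(4,), (2, 2)]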
+ :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ModelResponse` """ - :param request_iterator: - :param context: - :return: - """ - # print("STARTING UPLOAD!", flush=True) result = None for request in request_iterator: if request.status == fedn.ModelStatus.IN_PROGRESS: @@ -109,18 +158,21 @@ def Upload(self, request_iterator, context): return result def Download(self, request, context): - """ - - :param request: - :param context: - :return: + """ RPC endpoints for downloading a model. + + :param request: The model request object. + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelRequest` + :param context: The context object (unused) + :type context: :class:`grpc._server._Context` + :return: A model response iterator. + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ModelResponse` """ try: if self.models.get_meta(request.id) != fedn.ModelStatus.OK: print("Error file is not ready", flush=True) yield fedn.ModelResponse(id=request.id, data=None, status=fedn.ModelStatus.FAILED) except Exception: - print("Error file does not exist", flush=True) + print("Error file does not exist: {}".format(request.id), flush=True) yield fedn.ModelResponse(id=request.id, data=None, status=fedn.ModelStatus.FAILED) try: @@ -133,4 +185,4 @@ def Download(self, request, context): return yield fedn.ModelResponse(id=request.id, data=piece, status=fedn.ModelStatus.IN_PROGRESS) except Exception as e: - print("Downloading went wrong! {}".format(e), flush=True) + print("Downloading went wrong: {} {}".format(request.id, e), flush=True) diff --git a/fedn/fedn/network/combiner/round.py b/fedn/fedn/network/combiner/round.py new file mode 100644 index 000000000..a19adc9db --- /dev/null +++ b/fedn/fedn/network/combiner/round.py @@ -0,0 +1,386 @@ +import queue +import random +import sys +import time +import uuid + +from fedn.network.combiner.aggregators.aggregatorbase import get_aggregator +from fedn.utils.helpers import get_helper + + +class ModelUpdateError(Exception): + pass + + +class RoundController: + """ Round controller. + + The round controller recieves round configurations from the global controller + and coordinates model updates and aggregation, and model validations. + + :param aggregator_name: The name of the aggregator plugin module. + :type aggregator_name: str + :param storage: Model repository for :class: `fedn.network.combiner.Combiner` + :type storage: class: `fedn.common.storage.s3.s3repo.S3ModelRepository` + :param server: A handle to the Combiner class :class: `fedn.network.combiner.Combiner` + :type server: class: `fedn.network.combiner.Combiner` + :param modelservice: A handle to the model service :class: `fedn.network.combiner.modelservice.ModelService` + :type modelservice: class: `fedn.network.combiner.modelservice.ModelService` + """ + + def __init__(self, aggregator_name, storage, server, modelservice): + + self.round_configs = queue.Queue() + self.storage = storage + self.server = server + self.modelservice = modelservice + self.aggregator = get_aggregator(aggregator_name, self.storage, self.server, self.modelservice, self) + + def push_round_config(self, round_config): + """Add a round_config (job description) to the inbox. + + :param round_config: A dict containing the round configuration (from global controller). + :type round_config: dict + :return: A job id (universally unique identifier) for the round. 
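# waitforit(), defined a little further down, blocks until either the round timeout
# expires or the aggregation queue holds buffer_size updates. A standalone sketch of
# that termination policy against a plain queue.Queue; the timings here are made up
# for the demonstration.
import queue
import time


def waitforit(model_updates, round_timeout, buffer_size, polling_interval=0.1):
    """Return once buffer_size updates have arrived or round_timeout has elapsed."""
    waited = 0.0
    while waited < round_timeout:
        if model_updates.qsize() >= buffer_size:
            return 'buffer full'
        time.sleep(polling_interval)
        waited += polling_interval
    return 'timed out'


if __name__ == '__main__':
    updates = queue.Queue()
    for i in range(3):
        updates.put('update-{}'.format(i))
    # Terminates immediately: 3 updates are already queued and buffer_size == 3.
    print(waitforit(updates, round_timeout=5.0, buffer_size=3))
    # Terminates after ~0.5 s: the 4th update never arrives.
    print(waitforit(updates, round_timeout=0.5, buffer_size=4))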
+ :rtype: str + """ + try: + round_config['_job_id'] = str(uuid.uuid4()) + self.round_configs.put(round_config) + except Exception: + self.server.report_status( + "ROUNDCONTROL: Failed to push round config.", flush=True) + raise + return round_config['_job_id'] + + def load_model_update(self, helper, model_id): + """Load model update in its native format. + + :param helper: An instance of :class: `fedn.utils.helpers.HelperBase`, ML framework specific helper, defaults to None + :type helper: class: `fedn.utils.helpers.HelperBase` + :param model_id: The ID of the model update, UUID in str format + :type model_id: str + """ + + model_str = self.load_model_update_str(model_id) + if model_str: + try: + model = self.modelservice.load_model_from_BytesIO(model_str.getbuffer(), helper) + except IOError: + self.server.report_status( + "AGGREGATOR({}): Failed to load model!".format(self.name)) + else: + raise ModelUpdateError("Failed to load model.") + + return model + + def load_model_update_str(self, model_id, retry=3): + """Load model update object and return it as BytesIO. + + :param model_id: The ID of the model + :type model_id: str + :param retry: number of times retrying load model update, defaults to 3 + :type retry: int, optional + :return: Updated model + :rtype: class: `io.BytesIO` + """ + # Try reading model update from local disk/combiner memory + model_str = self.modelservice.models.get(model_id) + # And if we cannot access that, try downloading from the server + if model_str is None: + model_str = self.modelservice.get_model(model_id) + # TODO: use retrying library + tries = 0 + while tries < retry: + tries += 1 + if not model_str or sys.getsizeof(model_str) == 80: + self.server.report_status( + "ROUNDCONTROL: Model download failed. retrying", flush=True) + + time.sleep(1) + model_str = self.modelservice.get_model(model_id) + + return model_str + + def waitforit(self, config, buffer_size=100, polling_interval=0.1): + """ Defines the policy for how long the server should wait before starting to aggregate models. + + The policy is as follows: + 1. Wait a maximum of time_window time until the round times out. + 2. Terminate if a preset number of model updates (buffer_size) are in the queue. + + :param config: The round config object + :type config: dict + :param buffer_size: The number of model updates to wait for before starting aggregation, defaults to 100 + :type buffer_size: int, optional + :param polling_interval: The polling interval, defaults to 0.1 + :type polling_interval: float, optional + """ + + time_window = float(config['round_timeout']) + + tt = 0.0 + while tt < time_window: + if self.aggregator.model_updates.qsize() >= buffer_size: + break + + time.sleep(polling_interval) + tt += polling_interval + + def _training_round(self, config, clients): + """Send model update requests to clients and aggregate results. + + :param config: The round config object (passed to the client). + :type config: dict + :param clients: clients to participate in the training round + :type clients: list + :return: an aggregated model and associated metadata + :rtype: model, dict + """ + + self.server.report_status( + "ROUNDCONTROL: Initiating training round, participating clients: {}".format(clients)) + + meta = {} + meta['nr_expected_updates'] = len(clients) + meta['nr_required_updates'] = int(config['clients_required']) + meta['timeout'] = float(config['round_timeout']) + + # Request model updates from all active clients. 
+ self.server.request_model_update(config, clients=clients) + + # If buffer_size is -1 (default), the round terminates when/if all clients have completed. + if int(config['buffer_size']) == -1: + buffer_size = len(clients) + else: + buffer_size = int(config['buffer_size']) + + # Wait / block until the round termination policy has been met. + self.waitforit(config, buffer_size=buffer_size) + + tic = time.time() + model = None + data = None + + try: + helper = get_helper(config['helper_type']) + print("ROUNDCONTROL: Config delete_models_storage: {}".format(config['delete_models_storage']), flush=True) + if config['delete_models_storage'] == 'True': + delete_models = True + else: + delete_models = False + model, data = self.aggregator.combine_models(helper=helper, + delete_models=delete_models) + except Exception as e: + print("AGGREGATION FAILED AT COMBINER! {}".format(e), flush=True) + + meta['time_combination'] = time.time() - tic + meta['aggregation_time'] = data + return model, meta + + def _validation_round(self, config, clients, model_id): + """Send model validation requests to clients. + + :param config: The round config object (passed to the client). + :type config: dict + :param clients: clients to send validation requests to + :type clients: list + :param model_id: The ID of the model to validate + :type model_id: str + """ + self.server.request_model_validation(model_id, config, clients) + + def stage_model(self, model_id, timeout_retry=3, retry=2): + """Download a model from persistent storage and set in modelservice. + + :param model_id: ID of the model update object to stage. + :type model_id: str + :param timeout_retry: Sleep before retrying download again(sec), defaults to 3 + :type timeout_retry: int, optional + :param retry: Number of retries, defaults to 2 + :type retry: int, optional + """ + + # If the model is already in memory at the server we do not need to do anything. + if self.modelservice.models.exist(model_id): + print("MODEL EXISTST (NOT)", flush=True) + return + print("MODEL STAGING", flush=True) + # If not, download it and stage it in memory at the combiner. + tries = 0 + while True: + try: + model = self.storage.get_model_stream(model_id) + if model: + break + except Exception: + self.server.report_status("ROUNDCONTROL: Could not fetch model from storage backend, retrying.", + flush=True) + time.sleep(timeout_retry) + tries += 1 + if tries > retry: + self.server.report_status( + "ROUNDCONTROL: Failed to stage model {} from storage backend!".format(model_id), flush=True) + return + + self.modelservice.set_model(model, model_id) + + def _assign_round_clients(self, n, type="trainers"): + """ Obtain a list of clients(trainers or validators) to ask for updates in this round. + + :param n: Size of a random set taken from active trainers(clients), if n > "active trainers" all is used + :type n: int + :param type: type of clients, either "trainers" or "validators", defaults to "trainers" + :type type: str, optional + :return: Set of clients + :rtype: list + """ + + if type == "validators": + clients = self.server.get_active_validators() + elif type == "trainers": + clients = self.server.get_active_trainers() + else: + self.server.report_status( + "ROUNDCONTROL(ERROR): {} is not a supported type of client".format(type), flush=True) + raise + + # If the number of requested trainers exceeds the number of available, use all available. + if n > len(clients): + n = len(clients) + + # If not, we pick a random subsample of all available clients. 
+ clients = random.sample(clients, n) + + return clients + + def _check_nr_round_clients(self, config, timeout=0.0): + """Check that the minimal number of clients required to start a round are available. + + :param config: The round config object. + :type config: dict + :param timeout: Timeout in seconds, defaults to 0.0 + :type timeout: float, optional + :return: True if the required number of clients are available, False otherwise. + :rtype: bool + """ + + ready = False + t = 0.0 + while not ready: + active = self.server.nr_active_trainers() + + if active >= int(config['clients_requested']): + return True + else: + self.server.report_status("waiting for {} clients to get started, currently: {}".format( + int(config['clients_requested']) - active, + active), flush=True) + if t >= timeout: + if active >= int(config['clients_required']): + return True + else: + return False + + time.sleep(1.0) + t += 1.0 + + return ready + + def execute_validation_round(self, round_config): + """ Coordinate validation rounds as specified in config. + + :param round_config: The round config object. + :type round_config: dict + """ + model_id = round_config['model_id'] + self.server.report_status( + "COMBINER orchestrating validation of model {}".format(model_id)) + self.stage_model(model_id) + validators = self._assign_round_clients( + self.server.max_clients, type="validators") + self._validation_round(round_config, validators, model_id) + + def execute_training_round(self, config): + """ Coordinates clients to execute training tasks. + + :param config: The round config object. + :type config: dict + :return: metadata about the training round. + :rtype: dict + """ + + self.server.report_status( + "ROUNDCONTROL: Processing training round, job_id {}".format(config['_job_id']), flush=True) + + data = {} + data['config'] = config + data['round_id'] = config['round_id'] + + # Make sure the model to update is available on this combiner. + self.stage_model(config['model_id']) + + clients = self._assign_round_clients(self.server.max_clients) + model, meta = self._training_round(config, clients) + data['data'] = meta + + if model is None: + self.server.report_status( + "\t Failed to update global model in round {0}!".format(config['round_id'])) + + if model is not None: + helper = get_helper(config['helper_type']) + a = self.modelservice.serialize_model_to_BytesIO(model, helper) + # Send aggregated model to server + model_id = str(uuid.uuid4()) + self.modelservice.set_model(a, model_id) + a.close() + data['model_id'] = model_id + + self.server.report_status( + "ROUNDCONTROL: TRAINING ROUND COMPLETED. Aggregated model id: {}, Job id: {}".format(model_id, config['_job_id']), flush=True) + + return data + + def run(self, polling_interval=1.0): + """ Main control loop. Execute rounds based on round config on the queue. + + :param polling_interval: The polling interval in seconds for checking if a new job/config is available. 
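# _check_nr_round_clients above waits for the number of clients asked for in the
# round config (clients_requested) and, once the timeout is reached, settles for the
# smaller hard minimum (clients_required). A standalone sketch of that two-threshold
# policy, with a callable standing in for server.nr_active_trainers().
import time


def check_round_clients(nr_active_trainers, clients_requested, clients_required, timeout=0.0):
    """Return True if enough trainers are connected to start the round."""
    waited = 0.0
    while True:
        active = nr_active_trainers()
        if active >= clients_requested:
            return True
        if waited >= timeout:
            # Fall back to the hard minimum once we have waited long enough.
            return active >= clients_required
        time.sleep(1.0)
        waited += 1.0


if __name__ == '__main__':
    print(check_round_clients(lambda: 2, clients_requested=4, clients_required=2))  # True
    print(check_round_clients(lambda: 1, clients_requested=4, clients_required=2))  # False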
+ :type polling_interval: float + """ + try: + while True: + try: + round_config = self.round_configs.get(block=False) + + # Check that the minimum allowed number of clients are connected + ready = self._check_nr_round_clients(round_config) + round_meta = {} + + if ready: + if round_config['task'] == 'training': + tic = time.time() + round_meta = self.execute_training_round(round_config) + round_meta['time_exec_training'] = time.time() - \ + tic + round_meta['status'] = "Success" + round_meta['name'] = self.server.id + self.server.tracer.set_round_combiner_data(round_meta) + elif round_config['task'] == 'validation' or round_config['task'] == 'inference': + self.execute_validation_round(round_config) + else: + self.server.report_status( + "ROUNDCONTROL: Round config contains unkown task type.", flush=True) + else: + round_meta = {} + round_meta['status'] = "Failed" + round_meta['reason'] = "Failed to meet client allocation requirements for this round config." + self.server.report_status( + "ROUNDCONTROL: {0}".format(round_meta['reason']), flush=True) + + self.round_configs.task_done() + except queue.Empty: + time.sleep(polling_interval) + + except (KeyboardInterrupt, SystemExit): + pass diff --git a/fedn/fedn/network/combiner/server.py b/fedn/fedn/network/combiner/server.py new file mode 100644 index 000000000..f5449a375 --- /dev/null +++ b/fedn/fedn/network/combiner/server.py @@ -0,0 +1,866 @@ +import base64 +import json +import queue +import re +import signal +import sys +import threading +import time +import uuid +from datetime import datetime, timedelta +from enum import Enum + +import fedn.common.net.grpc.fedn_pb2 as fedn +import fedn.common.net.grpc.fedn_pb2_grpc as rpc +from fedn.common.net.connect import ConnectorCombiner, Status +from fedn.common.net.grpc.server import Server +from fedn.common.storage.s3.s3repo import S3ModelRepository +from fedn.common.tracer.mongotracer import MongoTracer +from fedn.network.combiner.modelservice import ModelService +from fedn.network.combiner.round import RoundController + +VALID_NAME_REGEX = '^[a-zA-Z0-9_-]*$' + + +class Role(Enum): + """ Enum for combiner roles. """ + WORKER = 1 + COMBINER = 2 + REDUCER = 3 + OTHER = 4 + + +def role_to_proto_role(role): + """ Convert a Role to a proto Role. + + :param role: the role to convert + :type role: :class:`fedn.network.combiner.server.Role` + :return: proto role + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Role` + """ + if role == Role.COMBINER: + return fedn.COMBINER + if role == Role.WORKER: + return fedn.WORKER + if role == Role.REDUCER: + return fedn.REDUCER + if role == Role.OTHER: + return fedn.OTHER + + +class Combiner(rpc.CombinerServicer, rpc.ReducerServicer, rpc.ConnectorServicer, rpc.ControlServicer): + """ Combiner gRPC server. """ + + def __init__(self, config): + """ Initialize a Combiner. + + :param config: configuration for the combiner + :type config: dict + """ + + # Client queues + self.clients = {} + + self.modelservice = ModelService() + + # Validate combiner name + match = re.search(VALID_NAME_REGEX, config['name']) + if not match: + raise ValueError('Unallowed character in combiner name. 
Allowed characters: a-z, A-Z, 0-9, _, -.') + + self.id = config['name'] + self.role = Role.COMBINER + self.max_clients = config['max_clients'] + + # Connector to announce combiner to discover service (reducer) + announce_client = ConnectorCombiner(host=config['discover_host'], + port=config['discover_port'], + myhost=config['host'], + fqdn=config['fqdn'], + myport=config['port'], + token=config['token'], + name=config['name'], + secure=config['secure'], + verify=config['verify']) + + response = None + while True: + # announce combiner to discover service + status, response = announce_client.announce() + if status == Status.TryAgain: + print(response, flush=True) + time.sleep(5) + continue + if status == Status.Assigned: + announce_config = response + print( + "COMBINER {0}: Announced successfully".format(self.id), flush=True) + break + if status == Status.UnAuthorized: + print(response, flush=True) + sys.exit("Exiting: Unauthorized") + + cert = announce_config['certificate'] + key = announce_config['key'] + + if announce_config['certificate']: + cert = base64.b64decode(announce_config['certificate']) # .decode('utf-8') + key = base64.b64decode(announce_config['key']) # .decode('utf-8') + + # Set up gRPC server configuration + grpc_config = {'port': config['port'], + 'secure': config['secure'], + 'certificate': cert, + 'key': key} + + # Set up model repository + self.repository = S3ModelRepository( + announce_config['storage']['storage_config']) + + # Create gRPC server + self.server = Server(self, self.modelservice, grpc_config) + + # Set up tracer for statestore + self.tracer = MongoTracer( + announce_config['statestore']['mongo_config'], announce_config['statestore']['network_id']) + + # Set up round controller + self.control = RoundController(config['aggregator'], self.repository, self, self.modelservice) + + # Start thread for round controller + threading.Thread(target=self.control.run, daemon=True).start() + + # Start the gRPC server + self.server.start() + + def __whoami(self, client, instance): + """ Set the client id and role in a proto message. + + :param client: the client to set the id and role for + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param instance: the instance to get the id and role from + :type instance: :class:`fedn.network.combiner.server.Combiner` + :return: the client with id and role set + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Client` + """ + client.name = instance.id + client.role = role_to_proto_role(instance.role) + return client + + def report_status(self, msg, log_level=fedn.Status.INFO, type=None, request=None, flush=True): + """ Report status of the combiner. + + :param msg: the message to report + :type msg: str + :param log_level: the log level to report at + :type log_level: :class:`fedn.common.net.grpc.fedn_pb2.Status` + :param type: the type of status to report + :type type: :class:`fedn.common.net.grpc.fedn_pb2.Status.Type` + :param request: the request to report status for + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param flush: whether to flush the message to stdout + :type flush: bool + """ + print("{}:COMBINER({}):{} {}".format(datetime.now().strftime( + '%Y-%m-%d %H:%M:%S'), self.id, log_level, msg), flush=flush) + + def request_model_update(self, config, clients=[]): + """ Ask clients to update the current global model. 
+ + :param config: the model configuration to send to clients + :type config: dict + :param clients: the clients to send the request to + :type clients: list + + """ + + request = fedn.ModelUpdateRequest() + self.__whoami(request.sender, self) + request.model_id = config['model_id'] + request.correlation_id = str(uuid.uuid4()) + request.timestamp = str(datetime.now()) + request.data = json.dumps(config) + + if len(clients) == 0: + clients = self.get_active_trainers() + + for client in clients: + request.receiver.name = client.name + request.receiver.role = fedn.WORKER + _ = self.SendModelUpdateRequest(request, self) + # TODO: Check response + + print("COMBINER: Sent model update request for model {} to clients {}".format( + request.model_id, clients), flush=True) + + def request_model_validation(self, model_id, config, clients=[]): + """ Ask clients to validate the current global model. + + :param model_id: the model id to validate + :type model_id: str + :param config: the model configuration to send to clients + :type config: dict + :param clients: the clients to send the request to + :type clients: list + + """ + + request = fedn.ModelValidationRequest() + self.__whoami(request.sender, self) + request.model_id = model_id + request.correlation_id = str(uuid.uuid4()) + request.timestamp = str(datetime.now()) + request.is_inference = (config['task'] == 'inference') + + if len(clients) == 0: + clients = self.get_active_validators() + + for client in clients: + request.receiver.name = client.name + request.receiver.role = fedn.WORKER + self.SendModelValidationRequest(request, self) + + print("COMBINER: Sent validation request for model {} to clients {}".format( + model_id, clients), flush=True) + + def _list_clients(self, channel): + """ List active clients on a channel. + + :param channel: the channel to list clients for, for example MODEL_UPDATE_REQUESTS + :type channel: :class:`fedn.common.net.grpc.fedn_pb2.Channel` + :return: the list of active clients + :rtype: list + """ + request = fedn.ListClientsRequest() + self.__whoami(request.sender, self) + request.channel = channel + clients = self.ListActiveClients(request, self) + return clients.client + + def get_active_trainers(self): + """ Get a list of active trainers. + + :return: the list of active trainers + :rtype: list + """ + trainers = self._list_clients(fedn.Channel.MODEL_UPDATE_REQUESTS) + return trainers + + def get_active_validators(self): + """ Get a list of active validators. + + :return: the list of active validators + :rtype: list + """ + validators = self._list_clients(fedn.Channel.MODEL_VALIDATION_REQUESTS) + return validators + + def nr_active_trainers(self): + """ Get the number of active trainers. + + :return: the number of active trainers + :rtype: int + """ + return len(self.get_active_trainers()) + + def nr_active_validators(self): + """ Get the number of active validators. + + :return: the number of active validators + :rtype: int + """ + return len(self.get_active_validators()) + + #################################################################################################################### + + def __join_client(self, client): + """ Add a client to the list of active clients. + + :param client: the client to add + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + """ + if client.name not in self.clients.keys(): + self.clients[client.name] = {"lastseen": datetime.now()} + + def _subscribe_client_to_queue(self, client, queue_name): + """ Subscribe a client to the queue. 
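+        If the client is not already registered with the combiner, it is first added to the list of active clients.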
+ + :param client: the client to subscribe + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param queue_name: the name of the queue to subscribe to + :type queue_name: str + """ + self.__join_client(client) + if queue_name not in self.clients[client.name].keys(): + self.clients[client.name][queue_name] = queue.Queue() + + def __get_queue(self, client, queue_name): + """ Get the queue for a client. + + :param client: the client to get the queue for + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param queue_name: the name of the queue to get + :type queue_name: str + :return: the queue + :rtype: :class:`queue.Queue` + + :raises KeyError: if the queue does not exist + """ + try: + return self.clients[client.name][queue_name] + except KeyError: + raise + + def _send_request(self, request, queue_name): + """ Send a request to a client. + + :param request: the request to send + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param queue_name: the name of the queue to send the request to + :type queue_name: str + """ + self.__route_request_to_client(request, request.receiver, queue_name) + + def _broadcast_request(self, request, queue_name): + """ Publish a request to all subscribed members. + + :param request: the request to send + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param queue_name: the name of the queue to send the request to + :type queue_name: str + """ + active_clients = self._list_active_clients() + for client in active_clients: + self.clients[client.name][queue_name].put(request) + + def __route_request_to_client(self, request, client, queue_name): + """ Route a request to a client. + + :param request: the request to send + :type request: :class:`fedn.common.net.grpc.fedn_pb2.Request` + :param client: the client to send the request to + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + :param queue_name: the name of the queue to send the request to + :type queue_name: str + + :raises Exception: if the request could not be routed, direct cause of KeyError in __get_queue + """ + try: + q = self.__get_queue(client, queue_name) + q.put(request) + except Exception: + print("Failed to route request to client: {} {}", + request.receiver, queue_name) + raise + + def _send_status(self, status): + """ Report a status to tracer. + + :param status: the status to report + :type status: :class:`fedn.common.net.grpc.fedn_pb2.Status` + """ + + self.tracer.report_status(status) + + def __register_heartbeat(self, client): + """ Register a client if first time connecting. Update heartbeat timestamp. + + :param client: the client to register + :type client: :class:`fedn.common.net.grpc.fedn_pb2.Client` + """ + self.__join_client(client) + self.clients[client.name]["lastseen"] = datetime.now() + + def flush_model_update_queue(self): + """Clear the model update queue (aggregator). 
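+
+        :return: True if the queue was successfully flushed, otherwise False
+        :rtype: bool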
""" + + q = self.control.aggregator.model_updates + try: + with q.mutex: + q.queue.clear() + q.all_tasks_done.notify_all() + q.unfinished_tasks = 0 + return True + except Exception: + return False + + ##################################################################################################################### + + # Control Service + + def Start(self, control: fedn.ControlRequest, context): + """ Start a round of federated learning" + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + print("\nRECIEVED **START** from Controller {}\n".format(control.command), flush=True) + + config = {} + for parameter in control.parameter: + config.update({parameter.key: parameter.value}) + + job_id = self.control.push_round_config(config) + + response = fedn.ControlResponse() + p = response.parameter.add() + p.key = "job_id" + p.value = job_id + + return response + + # RPCs related to remote configuration of the server, round controller, + # aggregator and their states. + + def Configure(self, control: fedn.ControlRequest, context): + """ Configure the Combiner. + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + for parameter in control.parameter: + setattr(self, parameter.key, parameter.value) + + response = fedn.ControlResponse() + return response + + def FlushAggregationQueue(self, control: fedn.ControlRequest, context): + """ Flush the queue. + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + + status = self.flush_model_update_queue() + + response = fedn.ControlResponse() + if status: + response.message = 'Success' + else: + response.message = 'Failed' + + return response + + ############################################################################## + + def Stop(self, control: fedn.ControlRequest, context): + """ TODO: Not yet implemented. + + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + response = fedn.ControlResponse() + print("\n RECIEVED **STOP** from Controller\n", flush=True) + return response + + def Report(self, control: fedn.ControlRequest, context): + """ Describe current state of the Combiner. 
+ + :param control: the control request + :type control: :class:`fedn.common.net.grpc.fedn_pb2.ControlRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the control response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ControlResponse` + """ + + response = fedn.ControlResponse() + self.report_status("\n RECIEVED **REPORT** from Controller\n", + log_level=fedn.Status.INFO) + + control_state = self.control.aggregator.get_state() + self.report_status("Aggregator state: {}".format(control_state), log_level=fedn.Status.INFO) + p = response.parameter.add() + for key, value in control_state.items(): + p.key = str(key) + p.value = str(value) + + active_trainers = self.get_active_trainers() + p = response.parameter.add() + p.key = "nr_active_trainers" + p.value = str(len(active_trainers)) + + active_validators = self.get_active_validators() + p = response.parameter.add() + p.key = "nr_active_validators" + p.value = str(len(active_validators)) + + active_trainers_ = self.get_active_trainers() + active_trainers = [] + for client in active_trainers_: + active_trainers.append(client) + p = response.parameter.add() + p.key = "active_trainers" + p.value = str(active_trainers) + + active_validators_ = self.get_active_validators() + active_validators = [] + for client in active_validators_: + active_validators.append(client) + p = response.parameter.add() + p.key = "active_validators" + p.value = str(active_validators) + + p = response.parameter.add() + p.key = "nr_active_clients" + p.value = str(len(active_trainers)+len(active_validators)) + + p = response.parameter.add() + p.key = "nr_unprocessed_compute_plans" + p.value = str(self.control.round_configs.qsize()) + + p = response.parameter.add() + p.key = "name" + p.value = str(self.id) + + return response + + ##################################################################################################################### + + def SendStatus(self, status: fedn.Status, context): + """ A client stream RPC endpoint that accepts status messages. + + :param status: the status message + :type status: :class:`fedn.common.net.grpc.fedn_pb2.Status` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + + self._send_status(status) + + response = fedn.Response() + response.response = "Status received." + return response + + def _list_subscribed_clients(self, queue_name): + """ List all clients subscribed to a queue. + + :param queue_name: the name of the queue + :type queue_name: str + :return: a list of client names + :rtype: list + """ + subscribed_clients = [] + for name, client in self.clients.items(): + if queue_name in client.keys(): + subscribed_clients.append(name) + return subscribed_clients + + def _list_active_clients(self, channel): + """ List all clients that have sent a status message in the last 10 seconds. + + :param channel: the name of the channel + :type channel: str + :return: a list of client names + :rtype: list + """ + active_clients = [] + for client in self._list_subscribed_clients(channel): + # This can break with different timezones. + now = datetime.now() + then = self.clients[client]["lastseen"] + # TODO: move the heartbeat timeout to config. + if (now - then) < timedelta(seconds=10): + active_clients.append(client) + return active_clients + + def _drop_inactive_clients(self): + """ TODO: Not implemented. Clean up clients that have missed the heartbeat. 
""" + + def ListActiveClients(self, request: fedn.ListClientsRequest, context): + """ RPC endpoint that returns a ClientList containing the names of all active clients. + An active client has sent a status message / responded to a heartbeat + request in the last 10 seconds. + + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ListClientsRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the client list + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ClientList` + """ + clients = fedn.ClientList() + active_clients = self._list_active_clients(request.channel) + + for client in active_clients: + clients.client.append(fedn.Client(name=client, role=fedn.WORKER)) + return clients + + def AcceptingClients(self, request: fedn.ConnectionRequest, context): + """ RPC endpoint that returns a ConnectionResponse indicating whether the server + is accepting clients or not. + + :param request: the request (unused) + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ConnectionRequest` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.ConnectionResponse` + """ + response = fedn.ConnectionResponse() + active_clients = self._list_active_clients( + fedn.Channel.MODEL_UPDATE_REQUESTS) + + try: + requested = int(self.max_clients) + if len(active_clients) >= requested: + response.status = fedn.ConnectionStatus.NOT_ACCEPTING + return response + if len(active_clients) < requested: + response.status = fedn.ConnectionStatus.ACCEPTING + return response + + except Exception as e: + print("Combiner not properly configured! {}".format(e), flush=True) + raise + + response.status = fedn.ConnectionStatus.TRY_AGAIN_LATER + return response + + def SendHeartbeat(self, heartbeat: fedn.Heartbeat, context): + """ RPC that lets clients send a hearbeat, notifying the server that + the client is available. + + :param heartbeat: the heartbeat + :type heartbeat: :class:`fedn.common.net.grpc.fedn_pb2.Heartbeat` + :param context: the context (unused) + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self.__register_heartbeat(heartbeat.sender) + response = fedn.Response() + response.sender.name = heartbeat.sender.name + response.sender.role = heartbeat.sender.role + response.response = "Heartbeat received" + return response + + # Combiner Service + + def ModelUpdateStream(self, update, context): + """ Model update stream RPC endpoint. Update status for client is connecting to stream. + + :param update: the update message + :type update: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdate` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + client = update.sender + status = fedn.Status( + status="Client {} connecting to ModelUpdateStream.".format(client.name)) + status.log_level = fedn.Status.INFO + status.sender.name = self.id + status.sender.role = role_to_proto_role(self.role) + + self._subscribe_client_to_queue(client, fedn.Channel.MODEL_UPDATES) + q = self.__get_queue(client, fedn.Channel.MODEL_UPDATES) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def ModelUpdateRequestStream(self, response, context): + """ A server stream RPC endpoint (Update model). Messages from client stream. 
+ + :param response: the response + :type response: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdateRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + + client = response.sender + metadata = context.invocation_metadata() + if metadata: + metadata = dict(metadata) + print("\nClient connected: {}\n".format(metadata['client']), flush=True) + + status = fedn.Status( + status="Client {} connecting to ModelUpdateRequestStream.".format(client.name)) + status.log_level = fedn.Status.INFO + status.timestamp = str(datetime.now()) + + self.__whoami(status.sender, self) + + self._subscribe_client_to_queue( + client, fedn.Channel.MODEL_UPDATE_REQUESTS) + q = self.__get_queue(client, fedn.Channel.MODEL_UPDATE_REQUESTS) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def ModelValidationStream(self, update, context): + """ Model validation stream RPC endpoint. Update status for client is connecting to stream. + + :param update: the update message + :type update: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidation` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + client = update.sender + status = fedn.Status( + status="Client {} connecting to ModelValidationStream.".format(client.name)) + status.log_level = fedn.Status.INFO + + status.sender.name = self.id + status.sender.role = role_to_proto_role(self.role) + + self._subscribe_client_to_queue(client, fedn.Channel.MODEL_VALIDATIONS) + q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATIONS) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def ModelValidationRequestStream(self, response, context): + """ A server stream RPC endpoint (Validation). Messages from client stream. + + :param response: the response + :type response: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidationRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + """ + + client = response.sender + status = fedn.Status( + status="Client {} connecting to ModelValidationRequestStream.".format(client.name)) + status.log_level = fedn.Status.INFO + status.sender.name = self.id + status.sender.role = role_to_proto_role(self.role) + status.timestamp = str(datetime.now()) + + self._subscribe_client_to_queue( + client, fedn.Channel.MODEL_VALIDATION_REQUESTS) + q = self.__get_queue(client, fedn.Channel.MODEL_VALIDATION_REQUESTS) + + self._send_status(status) + + while context.is_active(): + try: + yield q.get(timeout=1.0) + except queue.Empty: + pass + + def SendModelUpdateRequest(self, request, context): + """ Send a model update request. + + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdateRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self._send_request(request, fedn.Channel.MODEL_UPDATE_REQUESTS) + + response = fedn.Response() + response.response = "RECEIVED ModelUpdateRequest from client {}".format( + request.sender.name) + return response # TODO Fill later + + def SendModelUpdate(self, request, context): + """ Send a model update response. 
+ + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelUpdate` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self.control.aggregator.on_model_update(request) + + response = fedn.Response() + response.response = "RECEIVED ModelUpdate {} from client {}".format( + response, response.sender.name) + return response # TODO Fill later + + def SendModelValidationRequest(self, request, context): + """ Send a model validation request. + + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidationRequest` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self._send_request(request, fedn.Channel.MODEL_VALIDATION_REQUESTS) + + response = fedn.Response() + response.response = "RECEIVED ModelValidationRequest from client {}".format( + request.sender.name) + return response # TODO Fill later + + def register_model_validation(self, validation): + """Register a model validation. + + :param validation: the model validation + :type validation: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidation` + """ + + self.tracer.report_validation(validation) + + def SendModelValidation(self, request, context): + """ Send a model validation response. + + :param request: the request + :type request: :class:`fedn.common.net.grpc.fedn_pb2.ModelValidation` + :param context: the context + :type context: :class:`grpc._server._Context` + :return: the response + :rtype: :class:`fedn.common.net.grpc.fedn_pb2.Response` + """ + self.report_status("Recieved ModelValidation from {}".format(request.sender.name), + log_level=fedn.Status.INFO) + + self.register_model_validation(request) + + response = fedn.Response() + response.response = "RECEIVED ModelValidation {} from client {}".format( + response, response.sender.name) + return response + + #################################################################################################################### + + def run(self): + """ Start the server.""" + + print("COMBINER: {} started, ready for requests. ".format( + self.id), flush=True) + try: + while True: + signal.pause() + except (KeyboardInterrupt, SystemExit): + pass + self.server.stop() diff --git a/fedn/fedn/clients/reducer/config.py b/fedn/fedn/network/config.py similarity index 100% rename from fedn/fedn/clients/reducer/config.py rename to fedn/fedn/network/config.py diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/.gitkeep b/fedn/fedn/network/controller/__init__.py similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/.gitkeep rename to fedn/fedn/network/controller/__init__.py diff --git a/fedn/fedn/network/controller/control.py b/fedn/fedn/network/controller/control.py new file mode 100644 index 000000000..68e4623e1 --- /dev/null +++ b/fedn/fedn/network/controller/control.py @@ -0,0 +1,375 @@ +import copy +import time +import uuid + +from fedn.network.combiner.interfaces import CombinerUnavailableError +from fedn.network.controller.controlbase import ControlBase +from fedn.network.state import ReducerState + + +class UnsupportedStorageBackend(Exception): + """ Exception class for when storage backend is not supported. Passes """ + + def __init__(self, message): + """ Constructor method. + + :param message: The exception message. 
+ :type message: str + + """ + self.message = message + super().__init__(self.message) + + +class MisconfiguredStorageBackend(Exception): + """ Exception class for when storage backend is misconfigured. """ + + def __init__(self, message): + """ Constructor method. + + :param message: The exception message. + :type message: str + + """ + self.message = message + super().__init__(self.message) + + +class NoModelException(Exception): + """ Exception class for when model is None """ + + def __init__(self, message): + """ Constructor method. + + :param message: The exception message. + :type message: str + + """ + self.message = message + super().__init__(self.message) + + +class Control(ControlBase): + """ Controller, implementing the overall global training, validation and inference logic. """ + + def __init__(self, statestore): + """ Constructor method. + + :param statestore: A StateStorage instance. + :type statestore: class: `fedn.common.storage.statestorage.StateStorage` + + """ + + super().__init__(statestore) + self.name = "DefaultControl" + + def session(self, config): + """ Execute a new training session. A session consists of one + or several global rounds. All rounds in the same session + have the same round_config. + + :param config: The session config. + :type config: dict + + """ + + if self._state == ReducerState.instructing: + print("Controller already in INSTRUCTING state. A session is in progress.", flush=True) + return + + if not self.get_latest_model(): + print("No model in model chain, please provide a seed model!") + return + + self._state = ReducerState.instructing + + # Must be called to set info in the db + self.new_session(config) + + self._state = ReducerState.monitoring + + last_round = int(self.get_latest_round_id()) + + # Clear potential stragglers/old model updates at combiners + for combiner in self.network.get_combiners(): + combiner.flush_model_update_queue() + + # Execute the rounds in this session + for round in range(1, int(config['rounds'] + 1)): + # Increment the round number + + # round_id = self.new_round(session['session_id']) + if last_round: + current_round = last_round + round + else: + current_round = round + + try: + _, round_data = self.round(config, current_round) + except TypeError as e: + print("Could not unpack data from round: {0}".format(e), flush=True) + + print("CONTROL: Round completed with status {}".format( + round_data['status']), flush=True) + + self.tracer.set_round_data(round_data) + + # TODO: Report completion of session + self._state = ReducerState.idle + + def round(self, session_config, round_id): + """ Execute a single global round. + + :param session_config: The session config. + :type session_config: dict + :param round_id: The round id. + :type round_id: str(int) + + """ + + round_data = {'round_id': round_id} + + if len(self.network.get_combiners()) < 1: + print("REDUCER: No combiners connected!", flush=True) + round_data['status'] = 'Failed' + return None, round_data + + # 1. Assemble round config for this global round, + # and check which combiners are able to participate + # in the round. 
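+        # An illustrative session_config (as assembled by the dashboard /control
+        # endpoint) looks roughly like:
+        #   {'round_timeout': 180, 'buffer_size': -1, 'model_id': '<model uuid>',
+        #    'rounds': 1, 'delete_models_storage': True, 'clients_required': 1,
+        #    'clients_requested': 8, 'task': '', 'validate': True,
+        #    'helper_type': 'kerashelper'}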
+        round_config = copy.deepcopy(session_config)
+        round_config['rounds'] = 1
+        round_config['round_id'] = round_id
+        round_config['task'] = 'training'
+        round_config['model_id'] = self.get_latest_model()
+        round_config['helper_type'] = self.statestore.get_helper()
+
+        combiners = self.get_participating_combiners(round_config)
+        round_start = self.evaluate_round_start_policy(combiners)
+
+        if round_start:
+            print("CONTROL: round start policy met, participating combiners {}".format(
+                combiners), flush=True)
+        else:
+            print("CONTROL: Round start policy not met, skipping round!", flush=True)
+            round_data['status'] = 'Failed'
+            return None, round_data
+
+        round_data['round_config'] = round_config
+
+        # 2. Ask participating combiners to coordinate model updates
+        _ = self.request_model_updates(combiners)
+
+        # Wait until participating combiners have produced an updated global model.
+        wait = 0.0
+        # dict to store combiners that have successfully produced an updated model
+        updated = {}
+        # wait until all combiners have produced an updated model or until round timeout
+        print("CONTROL: Fetching round config (ID: {round_id}) from statestore:".format(
+            round_id=round_id), flush=True)
+        while len(updated) < len(combiners):
+            round = self.statestore.get_round(round_id)
+            if round:
+                print("CONTROL: Round found!", flush=True)
+                # For each combiner in the round, check if it has produced an updated model (status == 'Success')
+                for combiner in round['combiners']:
+                    print(combiner, flush=True)
+                    if combiner['status'] == 'Success':
+                        if combiner['name'] not in updated.keys():
+                            # Add combiner to updated dict
+                            updated[combiner['name']] = combiner['model_id']
+                    # Print combiner status
+                    print("CONTROL: Combiner {name} status: {status}".format(
+                        name=combiner['name'], status=combiner['status']), flush=True)
+            else:
+                # Print every 10 seconds based on value of wait
+                if wait % 10 == 0:
+                    print("CONTROL: Waiting for round to complete...", flush=True)
+                if wait >= session_config['round_timeout']:
+                    print("CONTROL: Round timeout! Exiting round...", flush=True)
+                    break
+            # Update wait time used for timeout
+            time.sleep(1.0)
+            wait += 1.0
+
+        round_valid = self.evaluate_round_validity_policy(updated)
+        if not round_valid:
+            print("REDUCER CONTROL: Round invalid!", flush=True)
+            round_data['status'] = 'Failed'
+            return None, round_data
+
+        print("CONTROL: Reducing models from combiners...", flush=True)
+        # 3. Reduce combiner models into a global model
+        try:
+            model, data = self.reduce(updated)
+            round_data['reduce'] = data
+            print("CONTROL: Done reducing models from combiners!", flush=True)
+        except Exception as e:
+            print("CONTROL: Failed to reduce models from combiners: {}".format(
+                e), flush=True)
+            round_data['status'] = 'Failed'
+            return None, round_data
+
+        # 4. Commit the global model to model trail
+        if model is not None:
+            print("CONTROL: Committing global model to model trail...", flush=True)
+            tic = time.time()
+            model_id = uuid.uuid4()
+            self.commit(model_id, model)
+            round_data['time_commit'] = time.time() - tic
+            print("CONTROL: Done committing global model to model trail!", flush=True)
+        else:
+            print("REDUCER: failed to update model in round with config {}".format(
+                session_config), flush=True)
+            round_data['status'] = 'Failed'
+            return None, round_data
+
+        round_data['status'] = 'Success'
+
+        # 5. Trigger participating combiner nodes to execute a validation round for the current model
+        validate = session_config['validate']
+        if validate:
+            combiner_config = copy.deepcopy(session_config)
+            combiner_config['round_id'] = round_id
+            combiner_config['model_id'] = self.get_latest_model()
+            combiner_config['task'] = 'validation'
+            combiner_config['helper_type'] = self.statestore.get_helper()
+
+            validating_combiners = self._select_participating_combiners(
+                combiner_config)
+
+            for combiner, combiner_config in validating_combiners:
+                try:
+                    print("CONTROL: Submitting validation round to combiner {}".format(
+                        combiner), flush=True)
+                    combiner.submit(combiner_config)
+                except CombinerUnavailableError:
+                    self._handle_unavailable_combiner(combiner)
+                    pass
+
+        return model_id, round_data
+
+    def reduce(self, combiners):
+        """ Combine updated models from Combiner nodes into one global model.
+
+        :param combiners: dict of combiner names (key) and model IDs (value) to reduce
+        :type combiners: dict
+        """
+
+        meta = {}
+        meta['time_fetch_model'] = 0.0
+        meta['time_load_model'] = 0.0
+        meta['time_aggregate_model'] = 0.0
+
+        i = 1
+        model = None
+        # Check if there are any combiners to reduce
+        if len(combiners) == 0:
+            print("REDUCER: No combiners to reduce!", flush=True)
+            return model, meta
+
+        for name, model_id in combiners.items():
+
+            # TODO: Handle inactive RPC error in get_model and raise specific error
+            print("REDUCER: Fetching model ({model_id}) from combiner {name}".format(
+                model_id=model_id, name=name), flush=True)
+            try:
+                tic = time.time()
+                combiner = self.get_combiner(name)
+                data = combiner.get_model(model_id)
+                meta['time_fetch_model'] += (time.time() - tic)
+            except Exception as e:
+                print("REDUCER: Failed to fetch model from combiner {}: {}".format(
+                    name, e), flush=True)
+                data = None
+
+            if data is not None:
+                try:
+                    tic = time.time()
+                    helper = self.get_helper()
+                    data.seek(0)
+                    model_next = helper.load(data)
+                    meta['time_load_model'] += (time.time() - tic)
+                    tic = time.time()
+                    model = helper.increment_average(model, model_next, i, i)
+                    meta['time_aggregate_model'] += (time.time() - tic)
+                except Exception:
+                    tic = time.time()
+                    data.seek(0)
+                    model = helper.load(data)
+                    meta['time_aggregate_model'] += (time.time() - tic)
+                i = i + 1
+
+        return model, meta
+
+    def infer_instruct(self, config):
+        """ Main entrypoint for executing the inference compute plan.
+
+        :param config: configuration for the inference round
+        """
+
+        # Check/set instructing state
+        if self._state == ReducerState.instructing:
+            print("Already set in INSTRUCTING state", flush=True)
+            return
+        self._state = ReducerState.instructing
+
+        # Check for a model chain
+        if not self.get_latest_model():
+            print("No model in model chain, please seed the alliance!")
+
+        # Set reducer in monitoring state
+        self._state = ReducerState.monitoring
+
+        # Start inference round
+        try:
+            self.inference_round(config)
+        except TypeError:
+            print("Could not unpack data from round...", flush=True)
+
+        # Set reducer in idle state
+        self._state = ReducerState.idle
+
+    def inference_round(self, config):
+        """ Execute an inference round.
+ + :param config: configuration for the inference round + """ + + # Init meta + round_data = {} + + # Check for at least one combiner in statestore + if len(self.network.get_combiners()) < 1: + print("REDUCER: No combiners connected!") + return round_data + + # Setup combiner configuration + combiner_config = copy.deepcopy(config) + combiner_config['model_id'] = self.get_latest_model() + combiner_config['task'] = 'inference' + combiner_config['helper_type'] = self.statestore.get_framework() + + # Select combiners + validating_combiners = self._select_round_combiners( + combiner_config) + + # Test round start policy + round_start = self.check_round_start_policy(validating_combiners) + if round_start: + print("CONTROL: round start policy met, participating combiners {}".format( + validating_combiners), flush=True) + else: + print("CONTROL: Round start policy not met, skipping round!", flush=True) + return None + + # Synch combiners with latest model and trigger inference + for combiner, combiner_config in validating_combiners: + try: + combiner.submit(combiner_config) + except CombinerUnavailableError: + # It is OK if inference fails for a combiner + self._handle_unavailable_combiner(combiner) + pass + + return round_data diff --git a/fedn/fedn/network/controller/controlbase.py b/fedn/fedn/network/controller/controlbase.py new file mode 100644 index 000000000..471e47a6a --- /dev/null +++ b/fedn/fedn/network/controller/controlbase.py @@ -0,0 +1,296 @@ +import os +import uuid +from abc import ABC, abstractmethod + +import fedn.utils.helpers +from fedn.common.storage.s3.s3repo import S3ModelRepository +from fedn.common.tracer.mongotracer import MongoTracer +from fedn.network.combiner.interfaces import CombinerUnavailableError +from fedn.network.network import Network +from fedn.network.state import ReducerState + + +class UnsupportedStorageBackend(Exception): + pass + + +class MisconfiguredStorageBackend(Exception): + pass + + +class MisconfiguredHelper(Exception): + pass + + +class ControlBase(ABC): + """ Base class and interface for a global controller. + Override this class to implement a global training strategy (control). + """ + + @abstractmethod + def __init__(self, statestore): + """ """ + self._state = ReducerState.setup + + self.statestore = statestore + if self.statestore.is_inited(): + self.network = Network(self, statestore) + + try: + storage_config = self.statestore.get_storage_backend() + except Exception: + print( + "REDUCER CONTROL: Failed to retrive storage configuration, exiting.", flush=True) + raise MisconfiguredStorageBackend() + + if storage_config['storage_type'] == 'S3': + self.model_repository = S3ModelRepository(storage_config['storage_config']) + else: + print("REDUCER CONTROL: Unsupported storage backend, exiting.", flush=True) + raise UnsupportedStorageBackend() + + # The tracer is a helper that manages state in the database backend + statestore_config = statestore.get_config() + self.tracer = MongoTracer( + statestore_config['mongo_config'], statestore_config['network_id']) + + if self.statestore.is_inited(): + self._state = ReducerState.idle + + @abstractmethod + def session(self, config): + pass + + @abstractmethod + def round(self, config, round_number): + pass + + @abstractmethod + def reduce(self, combiners): + pass + + def get_helper(self): + """ Get a helper instance from global config. + + :return: Helper instance. 
+ """ + helper_type = self.statestore.get_helper() + helper = fedn.utils.helpers.get_helper(helper_type) + if not helper: + raise MisconfiguredHelper("Unsupported helper type {}, please configure compute_package.helper !".format(helper_type)) + return helper + + def get_state(self): + """ + + :return: + """ + return self._state + + def idle(self): + """ + + :return: + """ + if self._state == ReducerState.idle: + return True + else: + return False + + def get_first_model(self): + """ + + :return: + """ + return self.statestore.get_first() + + def get_latest_model(self): + """ + + :return: + """ + return self.statestore.get_latest() + + def get_model_info(self): + """ + + :return: + """ + return self.statestore.get_model_info() + + def get_events(self): + """ + + :return: + """ + return self.statestore.get_events() + + def get_latest_round_id(self): + last_round = self.statestore.get_latest_round() + if not last_round: + return 0 + else: + return last_round['round_id'] + + def get_latest_round(self): + round = self.statestore.get_latest_round() + return round + + def get_compute_package_name(self): + """ + + :return: + """ + definition = self.statestore.get_compute_package() + if definition: + try: + package_name = definition['filename'] + return package_name + except (IndexError, KeyError): + print( + "No context filename set for compute context definition", flush=True) + return None + else: + return None + + def set_compute_package(self, filename, path): + """ Persist the configuration for the compute package. """ + self.model_repository.set_compute_package(filename, path) + self.statestore.set_compute_package(filename) + + def get_compute_package(self, compute_package=''): + """ + + :param compute_package: + :return: + """ + if compute_package == '': + compute_package = self.get_compute_package_name() + if compute_package: + return self.model_repository.get_compute_package(compute_package) + else: + return None + + def new_session(self, config): + """ Initialize a new session in backend db. """ + + if "session_id" not in config.keys(): + session_id = uuid.uuid4() + config['session_id'] = str(session_id) + + self.tracer.new_session(id=session_id) + self.tracer.set_session_config(session_id, config) + + def request_model_updates(self, combiners): + """Call Combiner server RPC to get a model update. """ + cl = [] + for combiner, combiner_round_config in combiners: + response = combiner.submit(combiner_round_config) + cl.append((combiner, response)) + return cl + + def commit(self, model_id, model=None): + """ Commit a model to the global model trail. The model commited becomes the lastest consensus model. """ + + helper = self.get_helper() + if model is not None: + print("CONTROL: Saving model file temporarily to disk...", flush=True) + outfile_name = helper.save(model) + print("CONTROL: Uploading model to Minio...", flush=True) + model_id = self.model_repository.set_model( + outfile_name, is_file=True) + + print("CONTROL: Deleting temporary model file...", flush=True) + os.unlink(outfile_name) + + print("CONTROL: Committing model {} to global model trail in statestore...".format( + model_id), flush=True) + self.statestore.set_latest(model_id) + + def get_combiner(self, name): + for combiner in self.network.get_combiners(): + if combiner.name == name: + return combiner + return None + + def get_participating_combiners(self, combiner_round_config): + """Assemble a list of combiners able to participate in a round as + descibed by combiner_round_config. 
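+
+        :param combiner_round_config: The round config.
+        :type combiner_round_config: dict
+        :return: A list of (combiner, combiner_round_config) tuples for all
+            participating combiners.
+        :rtype: list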
+        """
+        combiners = []
+        for combiner in self.network.get_combiners():
+            try:
+                combiner_state = combiner.report()
+            except CombinerUnavailableError:
+                self._handle_unavailable_combiner(combiner)
+                combiner_state = None
+
+            if combiner_state is not None:
+                is_participating = self.evaluate_round_participation_policy(
+                    combiner_round_config, combiner_state)
+                if is_participating:
+                    combiners.append((combiner, combiner_round_config))
+        return combiners
+
+    def evaluate_round_participation_policy(self, compute_plan, combiner_state):
+        """ Evaluate policy for combiner round-participation.
+        A combiner participates if it is responsive and reports enough
+        active clients to participate in the round.
+        """
+
+        if compute_plan['task'] == 'training':
+            nr_active_clients = int(combiner_state['nr_active_trainers'])
+        elif compute_plan['task'] == 'validation':
+            nr_active_clients = int(combiner_state['nr_active_validators'])
+        else:
+            print("Invalid task type!", flush=True)
+            return False
+
+        if int(compute_plan['clients_required']) <= nr_active_clients:
+            return True
+        else:
+            return False
+
+    def evaluate_round_start_policy(self, combiners):
+        """ Check if the policy to start a round is met. """
+        if len(combiners) > 0:
+            return True
+        else:
+            return False
+
+    def evaluate_round_validity_policy(self, combiners):
+        """ Check if the round should be seen as valid.
+
+        At the end of the round, before committing a model to the global model trail,
+        we check if the round validity policy has been met. This can involve
+        e.g. asserting that a certain number of combiners have reported in an
+        updated model, or that criteria on model performance have been met.
+        """
+        if len(combiners) == 0:
+            return False
+        else:
+            return True
+
+    def _select_participating_combiners(self, compute_plan):
+        participating_combiners = []
+        for combiner in self.network.get_combiners():
+            try:
+                combiner_state = combiner.report()
+            except CombinerUnavailableError:
+                self._handle_unavailable_combiner(combiner)
+                combiner_state = None
+
+            if combiner_state:
+                is_participating = self.evaluate_round_participation_policy(
+                    compute_plan, combiner_state)
+                if is_participating:
+                    participating_combiners.append((combiner, compute_plan))
+        return participating_combiners
+
+    def state(self):
+        """ Get the current state of the controller.
+
+        :return: The current state.
+        :rtype: :class:`fedn.network.state.ReducerState`
+        """
+        return self._state
diff --git a/fedn/fedn/clients/reducer/__init__.py b/fedn/fedn/network/dashboard/__init__.py
similarity index 100%
rename from fedn/fedn/clients/reducer/__init__.py
rename to fedn/fedn/network/dashboard/__init__.py
diff --git a/fedn/fedn/clients/reducer/plots.py b/fedn/fedn/network/dashboard/plots.py
similarity index 100%
rename from fedn/fedn/clients/reducer/plots.py
rename to fedn/fedn/network/dashboard/plots.py
diff --git a/fedn/fedn/clients/reducer/restservice.py b/fedn/fedn/network/dashboard/restservice.py
similarity index 88%
rename from fedn/fedn/clients/reducer/restservice.py
rename to fedn/fedn/network/dashboard/restservice.py
index 432c6fa92..5782ff088 100644
--- a/fedn/fedn/clients/reducer/restservice.py
+++ b/fedn/fedn/network/dashboard/restservice.py
@@ -17,10 +17,10 @@ url_for)
 from werkzeug.utils import secure_filename
 
-from fedn.clients.reducer.interfaces import CombinerInterface
-from fedn.clients.reducer.plots import Plot
-from fedn.clients.reducer.state import ReducerState, ReducerStateToString
 from fedn.common.tracer.mongotracer import MongoTracer
+from fedn.network.combiner.interfaces import CombinerInterface
+from fedn.network.dashboard.plots import Plot
+from fedn.network.state 
import ReducerState, ReducerStateToString from fedn.utils.checksum import sha UPLOAD_FOLDER = '/app/client/package/' @@ -106,8 +106,11 @@ def __init__(self, config, control, certificate_manager): else: self.SECRET_KEY = None - self.remote_compute_context = config["remote_compute_context"] - if self.remote_compute_context: + if 'use_ssl' in config.keys(): + self.use_ssl = config['use_ssl'] + + self.remote_compute_package = config["remote_compute_package"] + if self.remote_compute_package: self.package = 'remote' else: self.package = 'local' @@ -126,17 +129,17 @@ def to_dict(self): } return data - def check_compute_context(self): - """Check if the compute context/package has been configured, + def check_compute_package(self): + """Check if the compute package has been configured, if remote compute context is set to False, True will be returned :return: True if configured :rtype: bool """ - if not self.remote_compute_context: + if not self.remote_compute_package: return True - if not self.control.get_compute_context(): + if not self.control.get_compute_package(): return False else: return True @@ -165,7 +168,7 @@ def check_configured_response(self): 'package': self.package, 'msg': "Controller is not configured."}) - if not self.check_compute_context(): + if not self.check_compute_package(): return jsonify({'status': 'retry', 'package': self.package, 'msg': "Compute package is not configured. Please upload the compute package."}) @@ -187,7 +190,7 @@ def check_configured(self): Check if initial model has been configured, otherwise render setup_model template. :return: Rendered html template or None """ - if not self.check_compute_context(): + if not self.check_compute_package(): return render_template('setup.html', client=self.name, state=ReducerStateToString(self.control.state()), logs=None, refresh=False, message='Please set the compute package') @@ -420,8 +423,7 @@ def add(): address).get_keypair_raw() _ = base64.b64encode(certificate) _ = base64.b64encode(key) - certificate = copy.deepcopy(certificate) - key = copy.deepcopy(key) + else: certificate = None key = None @@ -432,9 +434,10 @@ def add(): address=address, fqdn=fqdn, port=port, - certificate=certificate, - key=key, + certificate=copy.deepcopy(certificate), + key=copy.deepcopy(key), ip=request.remote_addr) + self.control.network.add_combiner(combiner) combiner = self.control.network.get_combiner(name) @@ -443,8 +446,8 @@ def add(): 'status': 'added', 'storage': self.control.statestore.get_storage_backend(), 'statestore': self.control.statestore.get_config(), - 'certificate': combiner['certificate'], - 'key': combiner['key'] + 'certificate': combiner.get_certificate(), + 'key': combiner.get_key() } return jsonify(ret) @@ -480,7 +483,8 @@ def models(): uploaded_seed.seek(0) a.write(uploaded_seed.read()) helper = self.control.get_helper() - model = helper.load_model_from_BytesIO(a.getbuffer()) + a.seek(0) + model = helper.load(a) self.control.commit(uploaded_seed.filename, model) else: not_configured = self.check_configured() @@ -541,7 +545,7 @@ def drop_control(): # http://localhost:8090/control?rounds=4&model_id=879fa112-c861-4cb1-a25d-775153e5b548 @app.route('/control', methods=['GET', 'POST']) def control(): - """ Main page for round control. Configure, start and stop global training rounds. """ + """ Main page for round control. Configure, start and stop training sessions. 
""" # Token auth if self.token_auth_enabled: self.authorize(request, app.config.get('SECRET_KEY')) @@ -549,14 +553,13 @@ def control(): not_configured = self.check_configured() if not_configured: return not_configured - client = self.name + state = ReducerStateToString(self.control.state()) - logs = None refresh = True - if self.remote_compute_context: + if self.remote_compute_package: try: - self.current_compute_context = self.control.get_compute_context() + self.current_compute_context = self.control.get_compute_package_name() except Exception: self.current_compute_context = None else: @@ -566,8 +569,11 @@ def control(): url_for('index', state=state, refresh=refresh, message="Reducer is in monitoring state")) if request.method == 'POST': - timeout = float(request.form.get('timeout', 180)) + # Get session configuration + round_timeout = float(request.form.get('timeout', 180)) + buffer_size = int(request.form.get('buffer_size', -1)) rounds = int(request.form.get('rounds', 1)) + delete_models = request.form.get('delete_models', True) task = (request.form.get('task', '')) clients_required = request.form.get('clients_required', 1) clients_requested = request.form.get('clients_requested', 8) @@ -596,14 +602,15 @@ def control(): latest_model_id = self.control.get_latest_model() - config = {'round_timeout': timeout, 'model_id': latest_model_id, - 'rounds': rounds, 'clients_required': clients_required, + config = {'round_timeout': round_timeout, 'buffer_size': buffer_size, + 'model_id': latest_model_id, 'rounds': rounds, 'delete_models_storage': delete_models, + 'clients_required': clients_required, 'clients_requested': clients_requested, 'task': task, 'validate': validate, 'helper_type': helper_type} - threading.Thread(target=self.control.instruct, + threading.Thread(target=self.control.session, args=(config,)).start() - # self.control.instruct(config) + return redirect(url_for('index', state=state, refresh=refresh, message="Sent execution plan.", message_type='SUCCESS')) @@ -619,18 +626,12 @@ def control(): return render_template('index.html', latest_model_id=latest_model_id, compute_package=self.current_compute_context, seed_model_id=seed_model_id, - helper=self.control.statestore.get_framework(), validate=True, configured=True) - - client = self.name - state = ReducerStateToString(self.control.state()) - logs = None - refresh = False - return render_template('index.html', client=client, state=state, logs=logs, refresh=refresh, - configured=True) + helper=self.control.statestore.get_helper(), validate=True, configured=True) @app.route('/assign') def assign(): """Handle client assignment requests. 
""" + if self.token_auth_enabled: self.authorize(request, app.config.get('SECRET_KEY')) @@ -643,9 +644,9 @@ def assign(): combiner_preferred = request.args.get('combiner', None) if combiner_preferred: - combiner = self.control.find(combiner_preferred) + combiner = self.control.network.get_combiner(combiner_preferred) else: - combiner = self.control.find_available_combiner() + combiner = self.control.network.find_available_combiner() if combiner is None: return jsonify({'status': 'retry', @@ -678,7 +679,7 @@ def assign(): 'ip': combiner.ip, 'port': combiner.port, 'certificate': cert, - 'model_type': self.control.statestore.get_framework() + 'model_type': self.control.statestore.get_helper() } return jsonify(response) @@ -818,6 +819,7 @@ def network(): mem_cpu_plot = plot.create_cpu_plot() combiner_info = combiner_status() active_clients = client_status() + # print(combiner_info, flush=True) return render_template('network.html', network_plot=True, round_time_plot=round_time_plot, mem_cpu_plot=mem_cpu_plot, @@ -835,7 +837,7 @@ def config_download(): :return: """ chk_string = "" - name = self.control.get_compute_context() + name = self.control.get_compute_package_name() if name is None or name == '': chk_string = '' else: @@ -889,7 +891,7 @@ def context(): return redirect(url_for('context')) file = request.files['file'] - helper_type = request.form.get('helper', 'keras') + helper_type = request.form.get('helper', 'kerashelper') # if user does not select file, browser also # submit an empty part without filename if file.filename == '': @@ -905,14 +907,14 @@ def context(): if self.control.state() == ReducerState.instructing or self.control.state() == ReducerState.monitoring: return "Not allowed to change context while execution is ongoing." - self.control.set_compute_context(filename, file_path) - self.control.statestore.set_framework(helper_type) + self.control.set_compute_package(filename, file_path) + self.control.statestore.set_helper(helper_type) return redirect(url_for('control')) name = request.args.get('name', '') if name == '': - name = self.control.get_compute_context() + name = self.control.get_compute_package_name() if name is None or name == '': return render_template('context.html') @@ -945,7 +947,7 @@ def checksum(): # sum = '' name = request.args.get('name', None) if name == '' or name is None: - name = self.control.get_compute_context() + name = self.control.get_compute_package_name() if name is None or name == '': return jsonify({}) @@ -961,6 +963,56 @@ def checksum(): return jsonify(data) + @app.route('/infer', methods=['POST']) + def infer(): + """ + + :return: + """ + # Token auth + if self.token_auth_enabled: + self.authorize(request, app.config.get('SECRET_KEY')) + + # Check configured + not_configured = self.check_configured() + if not_configured: + return not_configured + + # Check compute context + if self.remote_compute_context: + try: + self.current_compute_context = self.control.get_compute_package() + except Exception as e: + print(e, flush=True) + self.current_compute_context = None + else: + self.current_compute_context = "None:Local" + + # Redirect if in monitoring state + if self.control.state() == ReducerState.monitoring: + return redirect( + url_for('index', state=ReducerStateToString(self.control.state()), refresh=True, message="Reducer is in monitoring state")) + + # POST params + timeout = int(request.form.get('timeout', 180)) + helper_type = request.form.get('helper', 'keras') + clients_required = request.form.get('clients_required', 1) + 
clients_requested = request.form.get('clients_requested', 8) + + # Start inference request + config = {'round_timeout': timeout, + 'model_id': self.control.get_latest_model(), + 'clients_required': clients_required, + 'clients_requested': clients_requested, + 'task': 'inference', + 'helper_type': helper_type} + threading.Thread(target=self.control.infer_instruct, + args=(config,)).start() + + # Redirect + return redirect(url_for('index', state=ReducerStateToString(self.control.state()), refresh=True, message="Sent execution plan (inference).", + message_type='SUCCESS')) + if not self.host: bind = "0.0.0.0" else: diff --git a/fedn/fedn/clients/reducer/static/dist/css/dark.css b/fedn/fedn/network/dashboard/static/dist/css/dark.css similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/css/dark.css rename to fedn/fedn/network/dashboard/static/dist/css/dark.css diff --git a/fedn/fedn/clients/reducer/static/dist/css/light.css b/fedn/fedn/network/dashboard/static/dist/css/light.css similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/css/light.css rename to fedn/fedn/network/dashboard/static/dist/css/light.css diff --git a/fedn/fedn/network/dashboard/static/dist/fonts/.gitkeep b/fedn/fedn/network/dashboard/static/dist/fonts/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.eot b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.eot similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.eot rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.eot diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.svg b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.svg rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.svg diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.ttf b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.ttf similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.ttf rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.ttf diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff2 b/fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff2 similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-brands-400.woff2 rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-brands-400.woff2 diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.eot b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.eot similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.eot rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.eot diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.svg b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.svg rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.svg diff --git 
a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.ttf b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.ttf similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.ttf rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.ttf diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff2 b/fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff2 similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-regular-400.woff2 rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-regular-400.woff2 diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.eot b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.eot similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.eot rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.eot diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.svg b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.svg rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.svg diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.ttf b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.ttf similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.ttf rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.ttf diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff diff --git a/fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff2 b/fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff2 similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/fonts/fa-solid-900.woff2 rename to fedn/fedn/network/dashboard/static/dist/fonts/fa-solid-900.woff2 diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-2.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-2.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-2.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-2.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-3.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-3.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-3.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-3.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-4.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-4.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-4.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-4.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-5.jpg 
b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-5.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar-5.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar-5.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/avatars/avatar.jpg b/fedn/fedn/network/dashboard/static/dist/img/avatars/avatar.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/avatars/avatar.jpg rename to fedn/fedn/network/dashboard/static/dist/img/avatars/avatar.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/brands/bootstrap.svg b/fedn/fedn/network/dashboard/static/dist/img/brands/bootstrap.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/brands/bootstrap.svg rename to fedn/fedn/network/dashboard/static/dist/img/brands/bootstrap.svg diff --git a/fedn/fedn/clients/reducer/static/dist/img/favicon.ico b/fedn/fedn/network/dashboard/static/dist/img/favicon.ico similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/favicon.ico rename to fedn/fedn/network/dashboard/static/dist/img/favicon.ico diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ad.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ad.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ad.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ad.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ae.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ae.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ae.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ae.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/af.png b/fedn/fedn/network/dashboard/static/dist/img/flags/af.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/af.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/af.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ag.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ag.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ag.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ag.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ai.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ai.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ai.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ai.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/al.png b/fedn/fedn/network/dashboard/static/dist/img/flags/al.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/al.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/al.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/am.png b/fedn/fedn/network/dashboard/static/dist/img/flags/am.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/am.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/am.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/an.png b/fedn/fedn/network/dashboard/static/dist/img/flags/an.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/an.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/an.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ao.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ao.png similarity index 100% 
rename from fedn/fedn/clients/reducer/static/dist/img/flags/ao.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ao.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/aq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/aq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/aq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/aq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ar.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ar.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ar.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ar.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/as.png b/fedn/fedn/network/dashboard/static/dist/img/flags/as.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/as.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/as.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/at.png b/fedn/fedn/network/dashboard/static/dist/img/flags/at.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/at.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/at.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/au.png b/fedn/fedn/network/dashboard/static/dist/img/flags/au.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/au.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/au.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/aw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/aw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/aw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/aw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ax.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ax.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ax.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ax.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/az.png b/fedn/fedn/network/dashboard/static/dist/img/flags/az.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/az.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/az.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ba.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ba.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ba.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ba.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/be.png b/fedn/fedn/network/dashboard/static/dist/img/flags/be.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/be.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/be.png diff --git 
a/fedn/fedn/clients/reducer/static/dist/img/flags/bf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/br.png b/fedn/fedn/network/dashboard/static/dist/img/flags/br.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/br.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/br.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bs.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bs.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bs.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bs.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bt.png similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/img/flags/bt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/by.png b/fedn/fedn/network/dashboard/static/dist/img/flags/by.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/by.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/by.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/bz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/bz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/bz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/bz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ca.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ca.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ca.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ca.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ch.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ch.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ch.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ch.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ci.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ci.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ci.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ci.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ck.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ck.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ck.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ck.png diff --git 
a/fedn/fedn/clients/reducer/static/dist/img/flags/cl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/co.png b/fedn/fedn/network/dashboard/static/dist/img/flags/co.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/co.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/co.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cx.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cx.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cx.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cx.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/cz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/cz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/cz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/cz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/de.png b/fedn/fedn/network/dashboard/static/dist/img/flags/de.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/de.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/de.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dj.png similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/img/flags/dj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/do.png b/fedn/fedn/network/dashboard/static/dist/img/flags/do.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/do.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/do.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/dz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/dz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/dz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/dz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ec.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ec.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ec.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ec.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ee.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ee.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ee.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ee.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/eg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/eg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/eg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/eg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/eh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/eh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/eh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/eh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/er.png b/fedn/fedn/network/dashboard/static/dist/img/flags/er.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/er.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/er.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/es.png b/fedn/fedn/network/dashboard/static/dist/img/flags/es.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/es.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/es.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/et.png b/fedn/fedn/network/dashboard/static/dist/img/flags/et.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/et.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/et.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/eu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/eu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/eu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/eu.png diff --git 
a/fedn/fedn/clients/reducer/static/dist/img/flags/fi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/fr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/fr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/fr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/fr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ga.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ga.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ga.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ga.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-eng.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-eng.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-eng.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-eng.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-nir.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-nir.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-nir.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-nir.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-sct.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-sct.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-sct.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-sct.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb-wls.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb-wls.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb-wls.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb-wls.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gd.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/gd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ge.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ge.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ge.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ge.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gi.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gs.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gs.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gs.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/gs.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/gy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/gy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/gy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/gy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ht.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ht.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ht.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ht.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/hu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/hu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/hu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/hu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/id.png b/fedn/fedn/network/dashboard/static/dist/img/flags/id.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/id.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/id.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ie.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ie.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ie.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ie.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/il.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/il.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/il.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/il.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/im.png b/fedn/fedn/network/dashboard/static/dist/img/flags/im.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/im.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/im.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/in.png b/fedn/fedn/network/dashboard/static/dist/img/flags/in.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/in.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/in.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/io.png b/fedn/fedn/network/dashboard/static/dist/img/flags/io.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/io.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/io.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/iq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/iq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/iq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/iq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ir.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ir.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ir.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ir.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/is.png b/fedn/fedn/network/dashboard/static/dist/img/flags/is.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/is.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/is.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/it.png b/fedn/fedn/network/dashboard/static/dist/img/flags/it.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/it.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/it.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/je.png b/fedn/fedn/network/dashboard/static/dist/img/flags/je.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/je.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/je.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/jm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/jm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/jm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/jm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/jo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/jo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/jo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/jo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/jp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/jp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/jp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/jp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ke.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ke.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ke.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/ke.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ki.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ki.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ki.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ki.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/km.png b/fedn/fedn/network/dashboard/static/dist/img/flags/km.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/km.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/km.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ky.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ky.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ky.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ky.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/kz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/kz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/kz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/kz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/la.png b/fedn/fedn/network/dashboard/static/dist/img/flags/la.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/la.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/la.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lc.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/lc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/li.png b/fedn/fedn/network/dashboard/static/dist/img/flags/li.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/li.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/li.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ls.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ls.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ls.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ls.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/lv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/lv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/lv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/lv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ly.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ly.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ly.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ly.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ma.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ma.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ma.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ma.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/md.png b/fedn/fedn/network/dashboard/static/dist/img/flags/md.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/md.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/md.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/me.png b/fedn/fedn/network/dashboard/static/dist/img/flags/me.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/me.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/me.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ml.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ml.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ml.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ml.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mo.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mo.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mo.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mo.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mp.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mp.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mp.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mp.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mq.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mq.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mq.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mq.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ms.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ms.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ms.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ms.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mt.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/mt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mx.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mx.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mx.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mx.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/my.png b/fedn/fedn/network/dashboard/static/dist/img/flags/my.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/my.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/my.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/mz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/mz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/mz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/mz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/na.png b/fedn/fedn/network/dashboard/static/dist/img/flags/na.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/na.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/na.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ne.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ne.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ne.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ne.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ng.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ng.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ng.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ng.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ni.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ni.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ni.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/ni.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/no.png b/fedn/fedn/network/dashboard/static/dist/img/flags/no.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/no.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/no.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/np.png b/fedn/fedn/network/dashboard/static/dist/img/flags/np.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/np.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/np.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/nz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/nz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/nz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/nz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/om.png b/fedn/fedn/network/dashboard/static/dist/img/flags/om.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/om.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/om.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pa.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pa.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pa.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pa.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pe.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pe.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pe.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pe.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ph.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ph.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ph.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ph.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pk.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/pk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ps.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ps.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ps.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ps.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/pw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/pw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/pw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/pw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/py.png b/fedn/fedn/network/dashboard/static/dist/img/flags/py.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/py.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/py.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/qa.png b/fedn/fedn/network/dashboard/static/dist/img/flags/qa.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/qa.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/qa.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/re.png b/fedn/fedn/network/dashboard/static/dist/img/flags/re.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/re.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/re.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ro.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ro.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ro.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ro.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/rs.png b/fedn/fedn/network/dashboard/static/dist/img/flags/rs.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/rs.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/rs.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ru.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ru.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ru.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ru.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/rw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/rw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/rw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/rw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sa.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sa.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sa.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sa.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sb.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sb.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sb.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sb.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sd.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sd.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sd.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sd.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/se.png b/fedn/fedn/network/dashboard/static/dist/img/flags/se.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/se.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/se.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sh.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sh.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sh.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sh.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/si.png b/fedn/fedn/network/dashboard/static/dist/img/flags/si.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/si.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/si.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sl.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/sl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/so.png b/fedn/fedn/network/dashboard/static/dist/img/flags/so.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/so.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/so.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ss.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ss.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ss.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ss.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/st.png b/fedn/fedn/network/dashboard/static/dist/img/flags/st.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/st.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/st.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sx.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sx.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sx.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sx.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/sz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/sz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/sz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/sz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/td.png b/fedn/fedn/network/dashboard/static/dist/img/flags/td.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/td.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/td.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/th.png b/fedn/fedn/network/dashboard/static/dist/img/flags/th.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/th.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/th.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tj.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tj.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tj.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tj.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tl.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tl.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tl.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tl.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/to.png b/fedn/fedn/network/dashboard/static/dist/img/flags/to.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/to.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/to.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tr.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tr.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tr.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tr.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tv.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tv.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tv.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tv.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tw.png 
b/fedn/fedn/network/dashboard/static/dist/img/flags/tw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/tz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/tz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/tz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/tz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ua.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ua.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ua.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ua.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ug.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ug.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ug.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ug.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/um.png b/fedn/fedn/network/dashboard/static/dist/img/flags/um.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/um.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/um.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/us.png b/fedn/fedn/network/dashboard/static/dist/img/flags/us.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/us.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/us.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/uy.png b/fedn/fedn/network/dashboard/static/dist/img/flags/uy.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/uy.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/uy.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/uz.png b/fedn/fedn/network/dashboard/static/dist/img/flags/uz.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/uz.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/uz.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/va.png b/fedn/fedn/network/dashboard/static/dist/img/flags/va.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/va.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/va.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vc.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vc.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vc.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vc.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ve.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ve.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ve.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ve.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vg.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vg.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vg.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vg.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vi.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vi.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vi.png rename to 
fedn/fedn/network/dashboard/static/dist/img/flags/vi.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vn.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vn.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vn.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vn.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/vu.png b/fedn/fedn/network/dashboard/static/dist/img/flags/vu.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/vu.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/vu.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/wf.png b/fedn/fedn/network/dashboard/static/dist/img/flags/wf.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/wf.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/wf.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ws.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ws.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ws.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ws.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/xk.png b/fedn/fedn/network/dashboard/static/dist/img/flags/xk.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/xk.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/xk.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/ye.png b/fedn/fedn/network/dashboard/static/dist/img/flags/ye.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/ye.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/ye.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/yt.png b/fedn/fedn/network/dashboard/static/dist/img/flags/yt.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/yt.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/yt.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/za.png b/fedn/fedn/network/dashboard/static/dist/img/flags/za.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/za.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/za.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/zm.png b/fedn/fedn/network/dashboard/static/dist/img/flags/zm.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/zm.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/zm.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/flags/zw.png b/fedn/fedn/network/dashboard/static/dist/img/flags/zw.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/flags/zw.png rename to fedn/fedn/network/dashboard/static/dist/img/flags/zw.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/illustrations/customer-support.png b/fedn/fedn/network/dashboard/static/dist/img/illustrations/customer-support.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/illustrations/customer-support.png rename to fedn/fedn/network/dashboard/static/dist/img/illustrations/customer-support.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/illustrations/searching.png b/fedn/fedn/network/dashboard/static/dist/img/illustrations/searching.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/illustrations/searching.png rename to 
fedn/fedn/network/dashboard/static/dist/img/illustrations/searching.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/illustrations/social.png b/fedn/fedn/network/dashboard/static/dist/img/illustrations/social.png similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/illustrations/social.png rename to fedn/fedn/network/dashboard/static/dist/img/illustrations/social.png diff --git a/fedn/fedn/clients/reducer/static/dist/img/logo.svg b/fedn/fedn/network/dashboard/static/dist/img/logo.svg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/logo.svg rename to fedn/fedn/network/dashboard/static/dist/img/logo.svg diff --git a/fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-1.jpg b/fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-1.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-1.jpg rename to fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-1.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-2.jpg b/fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-2.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-2.jpg rename to fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-2.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-3.jpg b/fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-3.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/photos/unsplash-3.jpg rename to fedn/fedn/network/dashboard/static/dist/img/photos/unsplash-3.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-analytics.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-analytics.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-analytics.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-analytics.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-crypto.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-crypto.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-crypto.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-crypto.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-default.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-default.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-default.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-default.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-saas.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-saas.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-saas.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-saas.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-social.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-social.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/dashboard-social.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/dashboard-social.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/mixed.jpg 
b/fedn/fedn/network/dashboard/static/dist/img/screenshots/mixed.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/mixed.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/mixed.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/pages-projects-list.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/pages-projects-list.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/pages-projects-list.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/pages-projects-list.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-compact.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-compact.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-compact.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-compact.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-right.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-right.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/sidebar-right.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/sidebar-right.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-colored.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-colored.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-colored.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-colored.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-dark.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-dark.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-dark.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-dark.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-default.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-default.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-default.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-default.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-light.jpg b/fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-light.jpg similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/img/screenshots/theme-light.jpg rename to fedn/fedn/network/dashboard/static/dist/img/screenshots/theme-light.jpg diff --git a/fedn/fedn/clients/reducer/static/dist/js/app.js b/fedn/fedn/network/dashboard/static/dist/js/app.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/app.js rename to fedn/fedn/network/dashboard/static/dist/js/app.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/app.js.LICENSE.txt b/fedn/fedn/network/dashboard/static/dist/js/app.js.LICENSE.txt similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/app.js.LICENSE.txt rename to fedn/fedn/network/dashboard/static/dist/js/app.js.LICENSE.txt diff --git a/fedn/fedn/clients/reducer/static/dist/js/plot.js b/fedn/fedn/network/dashboard/static/dist/js/plot.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plot.js rename to fedn/fedn/network/dashboard/static/dist/js/plot.js diff --git 
a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.exporters.svg.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.exporters.svg.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.exporters.svg.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.exporters.svg.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.forceAtlas2.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.noverlap.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.noverlap.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.layout.noverlap.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.layout.noverlap.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.neo4j.cypher.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.neo4j.cypher.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.neo4j.cypher.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.neo4j.cypher.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.gexf.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.gexf.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.gexf.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.gexf.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.json.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.json.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.parsers.json.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.parsers.json.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.pathfinding.astar.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.pathfinding.astar.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.pathfinding.astar.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.pathfinding.astar.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.animate.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.animate.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.animate.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.animate.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.dragNodes.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.dragNodes.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.dragNodes.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.dragNodes.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.filter.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.filter.min.js similarity index 100% rename from 
fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.filter.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.filter.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.neighborhoods.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.relativeSize.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.relativeSize.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.plugins.relativeSize.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.plugins.relativeSize.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customEdgeShapes.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customShapes.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customShapes.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.customShapes.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.customShapes.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeDots.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeDots.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeDots.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeDots.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.edgeLabels.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.parallelEdges.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.snapshot.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.snapshot.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.renderers.snapshot.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.renderers.snapshot.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.statistics.HITS.min.js b/fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.statistics.HITS.min.js similarity index 100% 
rename from fedn/fedn/clients/reducer/static/dist/js/plugins/sigma.statistics.HITS.min.js rename to fedn/fedn/network/dashboard/static/dist/js/plugins/sigma.statistics.HITS.min.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/settings.js b/fedn/fedn/network/dashboard/static/dist/js/settings.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/settings.js rename to fedn/fedn/network/dashboard/static/dist/js/settings.js diff --git a/fedn/fedn/clients/reducer/static/dist/js/sigma.min.js b/fedn/fedn/network/dashboard/static/dist/js/sigma.min.js similarity index 100% rename from fedn/fedn/clients/reducer/static/dist/js/sigma.min.js rename to fedn/fedn/network/dashboard/static/dist/js/sigma.min.js diff --git a/fedn/fedn/network/dashboard/templates/context.html b/fedn/fedn/network/dashboard/templates/context.html new file mode 100644 index 000000000..8f392082a --- /dev/null +++ b/fedn/fedn/network/dashboard/templates/context.html @@ -0,0 +1,34 @@ +{% extends "index.html" %} + +{% block content %} +
+ [context.html body: card headed "Upload and set compute package" with a {{ message }} status line; remaining form markup omitted]
+ + +{% endblock %} \ No newline at end of file diff --git a/fedn/fedn/clients/reducer/templates/dashboard.html b/fedn/fedn/network/dashboard/templates/dashboard.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/dashboard.html rename to fedn/fedn/network/dashboard/templates/dashboard.html diff --git a/fedn/fedn/clients/reducer/templates/eula.html b/fedn/fedn/network/dashboard/templates/eula.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/eula.html rename to fedn/fedn/network/dashboard/templates/eula.html diff --git a/fedn/fedn/clients/reducer/templates/events.html b/fedn/fedn/network/dashboard/templates/events.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/events.html rename to fedn/fedn/network/dashboard/templates/events.html diff --git a/fedn/fedn/network/dashboard/templates/index.html b/fedn/fedn/network/dashboard/templates/index.html new file mode 100644 index 000000000..4ac2d182b --- /dev/null +++ b/fedn/fedn/network/dashboard/templates/index.html @@ -0,0 +1,386 @@ + + + + + + + + + + {% if refresh %} + + {% endif %} + + FEDn Reducer + + + + + + + + + + + + + + + + +
+ [index.html body: page scaffolding with conditional {% if message %} / {% if message_type == 'WARNING' %} alert blocks; remaining markup omitted]
+ + + + + + \ No newline at end of file diff --git a/fedn/fedn/clients/reducer/templates/models.html b/fedn/fedn/network/dashboard/templates/models.html similarity index 80% rename from fedn/fedn/clients/reducer/templates/models.html rename to fedn/fedn/network/dashboard/templates/models.html index 0bf9efce7..2dfa4eb6b 100644 --- a/fedn/fedn/clients/reducer/templates/models.html +++ b/fedn/fedn/network/dashboard/templates/models.html @@ -46,21 +46,11 @@
Box plot showing the model validation distribution
-
Models
+
Model trail
{{ message }}
- diff --git a/fedn/fedn/clients/reducer/templates/network.html b/fedn/fedn/network/dashboard/templates/network.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/network.html rename to fedn/fedn/network/dashboard/templates/network.html diff --git a/fedn/fedn/clients/reducer/templates/setup.html b/fedn/fedn/network/dashboard/templates/setup.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/setup.html rename to fedn/fedn/network/dashboard/templates/setup.html diff --git a/fedn/fedn/clients/reducer/templates/setup_model.html b/fedn/fedn/network/dashboard/templates/setup_model.html similarity index 100% rename from fedn/fedn/clients/reducer/templates/setup_model.html rename to fedn/fedn/network/dashboard/templates/setup_model.html diff --git a/fedn/fedn/network/loadbalancer/__init__.py b/fedn/fedn/network/loadbalancer/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/fedn/fedn/network/loadbalancer/firstavailable.py b/fedn/fedn/network/loadbalancer/firstavailable.py new file mode 100644 index 000000000..6ffed0806 --- /dev/null +++ b/fedn/fedn/network/loadbalancer/firstavailable.py @@ -0,0 +1,15 @@ +from fedn.network.loadbalancer.loadbalancerbase import LoadBalancerBase + + +class LeastPacked(LoadBalancerBase): + + def __init__(self, network): + super().__init__(network) + + def find_combiner(self): + """ Find the first available combiner. """ + + for combiner in self.network.get_combiners(): + if combiner.allowing_clients(): + return combiner + return None diff --git a/fedn/fedn/network/loadbalancer/leastpacked.py b/fedn/fedn/network/loadbalancer/leastpacked.py new file mode 100644 index 000000000..588e6b491 --- /dev/null +++ b/fedn/fedn/network/loadbalancer/leastpacked.py @@ -0,0 +1,31 @@ +from fedn.network.combiner.interfaces import CombinerUnavailableError +from fedn.network.loadbalancer.loadbalancerbase import LoadBalancerBase + + +class LeastPacked(LoadBalancerBase): + + def __init__(self, network): + super().__init__(network) + + def find_combiner(self): + """ + Find the combiner with the least number of attached clients. + + """ + min_clients = None + selected_combiner = None + + for combiner in self.network.get_combiners(): + try: + if combiner.allowing_clients(): + combiner_state = combiner.report() + if not min_clients: + min_clients = combiner_state['nr_active_clients'] + selected_combiner = combiner + elif combiner_state['nr_active_clients'] < min_clients: + min_clients = combiner_state['nr_active_clients'] + selected_combiner = combiner + except CombinerUnavailableError: + pass + + return selected_combiner diff --git a/fedn/fedn/network/loadbalancer/loadbalancerbase.py b/fedn/fedn/network/loadbalancer/loadbalancerbase.py new file mode 100644 index 000000000..cc39c4200 --- /dev/null +++ b/fedn/fedn/network/loadbalancer/loadbalancerbase.py @@ -0,0 +1,14 @@ +from abc import ABC, abstractmethod + + +class LoadBalancerBase(ABC): + """ Abstract base class for load balancers. """ + + def __init__(self, network): + """ """ + self.network = network + + @abstractmethod + def find_combiner(self): + """ Find a combiner to connect to. 
""" + pass diff --git a/fedn/fedn/clients/reducer/network.py b/fedn/fedn/network/network.py similarity index 73% rename from fedn/fedn/clients/reducer/network.py rename to fedn/fedn/network/network.py index d851b13ff..15045db82 100644 --- a/fedn/fedn/clients/reducer/network.py +++ b/fedn/fedn/network/network.py @@ -1,21 +1,23 @@ import base64 -from fedn.clients.reducer.interfaces import (CombinerInterface, - CombinerUnavailableError) +from fedn.network.combiner.interfaces import (CombinerInterface, + CombinerUnavailableError) +from fedn.network.loadbalancer.leastpacked import LeastPacked class Network: """ FEDn network. """ - def __init__(self, control, statestore): + def __init__(self, control, statestore, load_balancer=None): """ """ self.statestore = statestore self.control = control self.id = statestore.network_id - @classmethod - def from_statestore(self, network_id): - """ """ + if not load_balancer: + self.load_balancer = LeastPacked(self) + else: + self.load_balancer = load_balancer def get_combiner(self, name): """ @@ -23,7 +25,11 @@ def get_combiner(self, name): :param name: :return: """ - return self.statestore.get_combiner(name) + combiners = self.get_combiners() + for combiner in combiners: + if name == combiner.name: + return combiner + return None def get_combiners(self): """ @@ -56,26 +62,13 @@ def add_combiner(self, combiner): print("Reducer is not idle, cannot add additional combiner.") return - if self.find(combiner.name): + if self.get_combiner(combiner.name): return print("adding combiner {}".format(combiner.name), flush=True) self.statestore.set_combiner(combiner.to_dict()) - def add_client(self, client): - """ Add a new client to the network. - - :param client: - :return: - """ - - if self.find_client(client['name']): - return - - print("adding client {}".format(client['name']), flush=True) - self.statestore.set_client(client) - - def remove(self, combiner): + def remove_combiner(self, combiner): """ :param combiner: @@ -86,19 +79,34 @@ def remove(self, combiner): return self.statestore.delete_combiner(combiner.name) - def find(self, name): + def find_available_combiner(self): """ - :param name: :return: """ - combiners = self.get_combiners() - for combiner in combiners: - if name == combiner.name: - return combiner - return None + combiner = self.load_balancer.find_combiner() + return combiner - def find_client(self, name): + def handle_unavailable_combiner(self, combiner): + """ This callback is triggered if a combiner is found to be unresponsive. """ + # TODO: Implement strategy to handle an unavailable combiner. + print("REDUCER CONTROL: Combiner {} unavailable.".format( + combiner.name), flush=True) + + def add_client(self, client): + """ Add a new client to the network. 
+ + :param client: + :return: + """ + + if self.get_client(client['name']): + return + + print("adding client {}".format(client['name']), flush=True) + self.statestore.set_client(client) + + def get_client(self, name): """ :param name: @@ -107,6 +115,14 @@ def find_client(self, name): ret = self.statestore.get_client(name) return ret + def update_client_data(self, client_data, status, role): + """ Update client status on DB""" + self.statestore.update_client_status(client_data, status, role) + + def get_client_info(self): + """ list available client in DB""" + return self.statestore.list_clients() + def describe(self): """ """ network = [] @@ -121,11 +137,3 @@ def describe(self): def check_health(self): """ """ pass - - def update_client_data(self, client_data, status, role): - """ Update client status on DB""" - self.statestore.update_client_status(client_data, status, role) - - def get_client_info(self): - """ list available client in DB""" - return self.statestore.list_clients() diff --git a/fedn/fedn/reducer.py b/fedn/fedn/network/reducer.py similarity index 86% rename from fedn/fedn/reducer.py rename to fedn/fedn/network/reducer.py index 271207854..47f6aca6d 100644 --- a/fedn/fedn/reducer.py +++ b/fedn/fedn/network/reducer.py @@ -4,10 +4,10 @@ import time from datetime import datetime -from fedn.clients.reducer.control import ReducerControl -from fedn.clients.reducer.restservice import ReducerRestService -from fedn.clients.reducer.state import ReducerStateToString from fedn.common.security.certificatemanager import CertificateManager +from fedn.network.controller.control import Control +from fedn.network.dashboard.restservice import ReducerRestService +from fedn.network.state import ReducerStateToString VALID_NAME_REGEX = '^[a-zA-Z0-9_-]*$' @@ -23,7 +23,7 @@ class MissingReducerConfiguration(Exception): class Reducer: """ A class used to instantiate the Reducer service. - Start Reducer services. + Start Reducer service. """ def __init__(self, statestore): @@ -40,7 +40,6 @@ def __init__(self, statestore): print("REDUCER: Failed to retrive Reducer config, exiting.") raise MissingReducerConfiguration() - print(config, flush=True) # Validate reducer name match = re.search(VALID_NAME_REGEX, config['name']) if not match: @@ -50,7 +49,7 @@ def __init__(self, statestore): # The certificate manager is a utility that generates (self-signed) certificates. self.certificate_manager = CertificateManager(os.getcwd() + "/certs/") - self.control = ReducerControl(self.statestore) + self.control = Control(self.statestore) self.rest = ReducerRestService( config, self.control, self.certificate_manager) @@ -59,7 +58,6 @@ def run(self): """Start REST service and control loop.""" threading.Thread(target=self.control_loop, daemon=True).start() - self.rest.run() def control_loop(self): @@ -77,11 +75,10 @@ def control_loop(self): "Reducer in state {} for {} seconds. 
Entering {} state".format(ReducerStateToString(old_state), delta.seconds, ReducerStateToString( - self.control.state())), + self.control.state())), flush=True) t1 = datetime.now() old_state = self.control.state() - self.control.monitor() except (KeyboardInterrupt, SystemExit): print("Exiting..", flush=True) diff --git a/fedn/fedn/clients/reducer/state.py b/fedn/fedn/network/state.py similarity index 100% rename from fedn/fedn/clients/reducer/state.py rename to fedn/fedn/network/state.py diff --git a/fedn/fedn/clients/reducer/statestore/__init__.py b/fedn/fedn/network/statestore/__init__.py similarity index 100% rename from fedn/fedn/clients/reducer/statestore/__init__.py rename to fedn/fedn/network/statestore/__init__.py diff --git a/fedn/fedn/clients/reducer/statestore/mongoreducerstatestore.py b/fedn/fedn/network/statestore/mongostatestore.py similarity index 79% rename from fedn/fedn/clients/reducer/statestore/mongoreducerstatestore.py rename to fedn/fedn/network/statestore/mongostatestore.py index f60ae0bad..b9f9d74e1 100644 --- a/fedn/fedn/clients/reducer/statestore/mongoreducerstatestore.py +++ b/fedn/fedn/network/statestore/mongostatestore.py @@ -4,14 +4,13 @@ import pymongo import yaml -from fedn.clients.reducer.state import (ReducerStateToString, - StringToReducerState) from fedn.common.storage.db.mongo import connect_to_mongodb +from fedn.network.state import ReducerStateToString, StringToReducerState -from .reducerstatestore import ReducerStateStore +from .statestorebase import StateStoreBase -class MongoReducerStateStore(ReducerStateStore): +class MongoStateStore(StateStoreBase): """ """ @@ -29,23 +28,21 @@ def __init__(self, network_id, config, defaults=None): self.combiners = self.network['combiners'] self.clients = self.network['clients'] self.storage = self.network['storage'] - self.certificates = self.network['certificates'] + # Control self.control = self.mdb['control'] - self.control_config = self.control['config'] + self.package = self.control['package'] self.state = self.control['state'] self.model = self.control['model'] - self.round = self.control["round"] + self.sessions = self.control['sessions'] + self.rounds = self.control['rounds'] - # Logging and dashboards + # Logging self.status = self.control["status"] - self.round_time = self.control["round_time"] - self.psutil_monitoring = self.control["psutil_monitoring"] - self.combiner_round_time = self.control['combiner_round_time'] self.__inited = True except Exception as e: - print("FAILED TO CONNECT TO MONGO, {}".format(e), flush=True) + print("FAILED TO CONNECT TO MONGODB, {}".format(e), flush=True) self.state = None self.model = None self.control = None @@ -106,14 +103,14 @@ def __init__(self, network_id, config, defaults=None): print(e) def is_inited(self): - """ + """ Check if the statestore is intialized. :return: """ return self.__inited def get_config(self): - """ + """Retrive the statestore config. :return: """ @@ -185,44 +182,32 @@ def get_latest(self): except (KeyError, IndexError): return None - def set_round_config(self, config): - """ + def get_latest_round(self): + """ Get the id of the most recent round. """ - :param config: - """ - self.control.config.update_one( - {'key': 'round_config'}, {'$set': config}, True) + return self.rounds.find_one(sort=[("_id", pymongo.DESCENDING)]) - def get_round_config(self): - """ + def get_round(self, id): + """ Get round with id 'id'. 
""" - :return: - """ - ret = self.control.config.find({'key': 'round_config'}) - try: - retcheck = ret[0] - if retcheck is None or retcheck == '' or retcheck == ' ': # ugly check for empty string - return None - return retcheck - except (KeyError, IndexError): - return None + return self.rounds.find_one({'round_id': str(id)}) - def set_compute_context(self, filename): - """ + def set_compute_package(self, filename): + """ Set the active compute package. :param filename: """ - self.control.config.update_one( - {'key': 'package'}, {'$set': {'filename': filename}}, True) - self.control.config.update_one({'key': 'package_trail'}, - {'$push': {'filename': filename, 'committed_at': str(datetime.now())}}, True) + self.control.package.update_one( + {'key': 'active'}, {'$set': {'filename': filename}}, True) + self.control.package.update_one({'key': 'package_trail'}, + {'$push': {'filename': filename, 'committed_at': str(datetime.now())}}, True) - def get_compute_context(self): - """ + def get_compute_package(self): + """ Get the active compute package. :return: """ - ret = self.control.config.find({'key': 'package'}) + ret = self.control.package.find({'key': 'active'}) try: retcheck = ret[0] if retcheck is None or retcheck == '' or retcheck == ' ': # ugly check for empty string @@ -231,25 +216,24 @@ def get_compute_context(self): except (KeyError, IndexError): return None - def set_framework(self, helper): + def set_helper(self, helper): """ :param helper: """ - self.control.config.update_one({'key': 'package'}, - {'$set': {'helper': helper}}, True) + self.control.package.update_one({'key': 'active'}, + {'$set': {'helper': helper}}, True) - def get_framework(self): + def get_helper(self): """ :return: """ - ret = self.control.config.find_one({'key': 'package'}) + ret = self.control.package.find_one({'key': 'active'}) # if local compute package used, then 'package' is None - if not ret: - # get framework from round_config instead - ret = self.control.config.find_one({'key': 'round_config'}) - print('FRAMEWORK:', ret) + # if not ret: + # get framework from round_config instead + # ret = self.control.config.find_one({'key': 'round_config'}) try: retcheck = ret['helper'] if retcheck == '' or retcheck == ' ': # ugly check for empty string @@ -349,7 +333,7 @@ def set_combiner(self, combiner_data): '$set': combiner_data}, True) def delete_combiner(self, combiner): - """ """ + """ Delete a combiner entry. """ try: self.combiners.delete_one({'name': combiner}) except Exception: @@ -366,7 +350,7 @@ def set_client(self, client_data): '$set': client_data}, True) def get_client(self, name): - """ """ + """ Retrive a client record by name. """ try: ret = self.clients.find({'key': name}) if list(ret) == []: @@ -377,31 +361,13 @@ def get_client(self, name): return None def list_clients(self): - """ """ + """List all clients registered on the network. """ try: ret = self.clients.find() return list(ret) except Exception: return None - def drop_control(self): - """ """ - # Control - self.state.drop() - self.control_config.drop() - self.control.drop() - - self.drop_models() - - def drop_models(self): - """ """ - self.model.drop() - self.combiner_round_time.drop() - self.status.drop() - self.psutil_monitoring.drop() - self.round_time.drop() - self.round.drop() - def update_client_status(self, client_data, status, role): """ Set or update client status. 
diff --git a/fedn/fedn/clients/reducer/statestore/reducerstatestore.py b/fedn/fedn/network/statestore/statestorebase.py similarity index 95% rename from fedn/fedn/clients/reducer/statestore/reducerstatestore.py rename to fedn/fedn/network/statestore/statestorebase.py index 45ef0ff10..75e117731 100644 --- a/fedn/fedn/clients/reducer/statestore/reducerstatestore.py +++ b/fedn/fedn/network/statestore/statestorebase.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod -class ReducerStateStore(ABC): +class StateStoreBase(ABC): """ """ diff --git a/fedn/fedn/utils/helpers.py b/fedn/fedn/utils/helpers.py index ac5a588aa..52379fd77 100644 --- a/fedn/fedn/utils/helpers.py +++ b/fedn/fedn/utils/helpers.py @@ -1,63 +1,43 @@ -from abc import ABC, abstractmethod +import importlib +import json +PLUGIN_PATH = "fedn.utils.plugins.{}" -class HelperBase(ABC): - """ Abstract class defining helpers. """ - def __init__(self): - """ """ - - @abstractmethod - def increment_average(self, model, model_next, n): - """ Compute one increment of incremental averaging. - n: the iteration index 1...N in the sequence. - """ - pass +def get_helper(helper_module_name): + """ Return an instance of the helper class. - @abstractmethod - def save_model(self, model, path): - """ - Serialize the model to file on disk on path. - The serialized model must be a single binary object. - """ - pass + :param helper_module_name: The name of the helper plugin module. + :type helper_module_name: str + :return: A helper instance. + :rtype: class: `fedn.utils.helpers.HelperBase` + """ + helper_plugin = PLUGIN_PATH.format(helper_module_name) + helper = importlib.import_module(helper_plugin) + return helper.Helper() - @abstractmethod - def load_model(self, path): - """ Load the model save with save_model from disk on path. """ - pass - @abstractmethod - def serialize_model_to_BytesIO(self, model): - """ Serialize a model to a BytesIO buffered object. """ - pass +def save_metadata(metadata, filename): + """ Save metadata to file. - @abstractmethod - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO buffered object. """ - pass + :param metadata: The metadata to save. + :type metadata: dict + :param filename: The name of the file to save to. + :type filename: str + """ + with open(filename+'-metadata', 'w') as outfile: + json.dump(metadata, outfile) - @abstractmethod - def get_tmp_path(self): - """ Return a temporary output path compatible with save_model, load_model. """ - pass +# Save metric data to file -def get_helper(helper_type): - """ Return an instance of the helper class. +def save_metrics(metrics, filename): + """ Save metrics to file. - :param helper_type (str): The helper type ('keras','pytorch') - :return: + :param metrics: The metrics to save. + :type metrics: dict + :param filename: The name of the file to save to. 
+ :type filename: str """ - if helper_type == 'numpyarray': - # TODO: refactor cyclical import to avoid this ugly line - """ noqa """; from fedn.utils.numpyarrayhelper import NumpyArrayHelper # autopep8: off # noqa: E702 - return NumpyArrayHelper() - elif helper_type == 'keras': - """ noqa """; from fedn.utils.kerashelper import KerasHelper # autopep8: off # noqa: E702 - return KerasHelper() - elif helper_type == 'pytorch': - """ noqa """; from fedn.utils.pytorchhelper import PytorchHelper # autopep8: off # noqa: E702 - return PytorchHelper() - else: - return None + with open(filename, 'w') as outfile: + json.dump(metrics, outfile) diff --git a/fedn/fedn/utils/kerashelper.py b/fedn/fedn/utils/kerashelper.py deleted file mode 100644 index be081e45e..000000000 --- a/fedn/fedn/utils/kerashelper.py +++ /dev/null @@ -1,106 +0,0 @@ -import os -import tempfile -from io import BytesIO - -import numpy as np - -from .helpers import HelperBase - - -class KerasHelper(HelperBase): - """ FEDn helper class for keras.Sequential. """ - - def average_weights(self, weights): - """ Average weights of Keras Sequential models. """ - - avg_w = [] - for i in range(len(weights[0])): - lay_l = np.array([w[i] for w in weights]) - weight_l_avg = np.mean(lay_l, 0) - avg_w.append(weight_l_avg) - - return avg_w - - def increment_average(self, weights, weights_next, n): - """ Update an incremental average. """ - w_prev = weights - w_next = weights_next - w = np.add(w_prev, (np.array(w_next) - np.array(w_prev)) / n) - return w - - def set_weights(self, weights_, weights): - """ - - :param weights_: - :param weights: - """ - weights_ = weights # noqa F841 - - def get_weights(self, weights): - """ - - :param weights: - :return: - """ - return weights - - def get_tmp_path(self): - """ Return a temporary output path compatible with save_model, load_model. """ - fd, path = tempfile.mkstemp(suffix='.npz') - os.close(fd) - return path - - def save_model(self, weights, path=None): - """ - - :param weights: - :param path: - :return: - """ - if not path: - path = self.get_tmp_path() - - weights_dict = {} - for i, w in enumerate(weights): - weights_dict[str(i)] = w - - np.savez_compressed(path, **weights_dict) - - return path - - def load_model(self, path="weights.npz"): - """ - - :param path: - :return: - """ - a = np.load(path) - weights = [] - for i in range(len(a.files)): - weights.append(a[str(i)]) - return weights - - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO object. """ - path = self.get_tmp_path() - with open(path, 'wb') as fh: - fh.write(model_bytesio) - fh.flush() - model = self.load_model(path) - os.unlink(path) - return model - - def serialize_model_to_BytesIO(self, model): - """ - - :param model: - :return: - """ - outfile_name = self.save_model(model) - - a = BytesIO() - a.seek(0, 0) - with open(outfile_name, 'rb') as f: - a.write(f.read()) - os.unlink(outfile_name) - return a diff --git a/fedn/fedn/utils/numpyarrayhelper.py b/fedn/fedn/utils/numpyarrayhelper.py deleted file mode 100644 index fee1ac42b..000000000 --- a/fedn/fedn/utils/numpyarrayhelper.py +++ /dev/null @@ -1,67 +0,0 @@ -import os -import tempfile -from io import BytesIO - -import numpy as np - -from .helpers import HelperBase - - -class NumpyArrayHelper(HelperBase): - """ FEDn helper class for numpy arrays. """ - - def increment_average(self, model, model_next, n): - """ Update an incremental average. 
""" - return np.add(model, (model_next - model) / n) - - def save_model(self, model, path=None): - """ - - :param model: - :param path: - :return: - """ - if not path: - _, path = tempfile.mkstemp() - np.savetxt(path, model) - return path - - def load_model(self, path): - """ - - :param path: - :return: - """ - model = np.loadtxt(path) - return model - - def serialize_model_to_BytesIO(self, model): - """ - - :param model: - :return: - """ - outfile_name = self.save_model(model) - - a = BytesIO() - a.seek(0, 0) - with open(outfile_name, 'rb') as f: - a.write(f.read()) - os.unlink(outfile_name) - return a - - def get_tmp_path(self): - """ Return a temporary output path compatible with save_model, load_model. """ - fd, path = tempfile.mkstemp() - os.close(fd) - return path - - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO object. """ - path = self.get_tmp_path() - with open(path, 'wb') as fh: - fh.write(model_bytesio) - fh.flush() - model = np.loadtxt(path) - os.unlink(path) - return model diff --git a/fedn/fedn/utils/plugins/__init__.py b/fedn/fedn/utils/plugins/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/fedn/fedn/utils/plugins/helperbase.py b/fedn/fedn/utils/plugins/helperbase.py new file mode 100644 index 000000000..6c9c147e6 --- /dev/null +++ b/fedn/fedn/utils/plugins/helperbase.py @@ -0,0 +1,52 @@ +import os +import tempfile +from abc import ABC, abstractmethod + + +class HelperBase(ABC): + """ Abstract class defining helpers. """ + + def __init__(self): + """ Initialize helper. """ + + self.name = self.__class__.__name__ + + @abstractmethod + def increment_average(self, model, model_next, a, W): + """ Compute one increment of incremental weighted averaging. + + :param model: Current model weights in array-like format. + :param model_next: New model weights in array-like format. + :param a: Number of examples in new model. + :param W: Total number of examples. + :return: Incremental weighted average of model weights. + """ + pass + + @abstractmethod + def save(self, model, path): + """Serialize weights to file. The serialized model must be a single binary object. + + :param model: Weights in array-like format. + :param path: Path to file. + + """ + pass + + @abstractmethod + def load(self, fh): + """ Load weights from file or filelike. + + :param fh: file path, filehandle, filelike. + :return: Weights in array-like format. + """ + pass + + def get_tmp_path(self): + """ Return a temporary output path compatible with save_model, load_model. + + :return: Path to file. + """ + fd, path = tempfile.mkstemp(suffix='.npz') + os.close(fd) + return path diff --git a/fedn/fedn/utils/plugins/kerashelper.py b/fedn/fedn/utils/plugins/kerashelper.py new file mode 100644 index 000000000..195858c76 --- /dev/null +++ b/fedn/fedn/utils/plugins/kerashelper.py @@ -0,0 +1,85 @@ +import numpy as np + +from .helperbase import HelperBase + + +class Helper(HelperBase): + """ FEDn helper class for keras.Sequential. """ + + def __init__(self): + """ Initialize helper. """ + self.name = "kerashelper" + super().__init__() + + # function to calculate an incremental weighted average of the weights + def increment_average(self, model, model_next, num_examples, total_examples): + """ Incremental weighted average of model weights. + + :param model: Current model weights. + :type model: list of numpy arrays. + :param model_next: New model weights. + :type model_next: list of numpy arrays. + :param num_examples: Number of examples in new model. 
+ :type num_examples: int + :param total_examples: Total number of examples. + :type total_examples: int + :return: Incremental weighted average of model weights. + :rtype: list of numpy arrays. + """ + # Incremental weighted average + w = num_examples / total_examples + weights = [] + for i in range(len(model)): + weights.append(w * model[i] + (1 - w) * model_next[i]) + + return weights + + # function to calculate an incremental weighted average of the weights using numpy.add + def increment_average_add(self, model, model_next, num_examples, total_examples): + """ Incremental weighted average of model weights. + + :param model: Current model weights. + :type model: list of numpy arrays. + :param model_next: New model weights. + :type model_next: list of numpy arrays. + :param num_examples: Number of examples in new model. + :type num_examples: int + :param total_examples: Total number of examples. + :type total_examples: int + :return: Incremental weighted average of model weights. + :rtype: list of numpy arrays. + """ + # Incremental weighted average + w = np.add(model, num_examples*(np.array(model_next) - np.array(model)) / total_examples) + return w + + def save(self, weights, path=None): + """ Serialize weights to file. The serialized model must be a single binary object. + + :param weights: List of weights in numpy format. + :param path: Path to file. + :return: Path to file. + """ + if not path: + path = self.get_tmp_path() + + weights_dict = {} + for i, w in enumerate(weights): + weights_dict[str(i)] = w + + np.savez_compressed(path, **weights_dict) + + return path + + def load(self, fh): + """ Load weights from file or filelike. + + :param fh: file path, filehandle, filelike. + :return: List of weights in numpy format. + """ + a = np.load(fh) + + weights = [] + for i in range(len(a.files)): + weights.append(a[str(i)]) + return weights diff --git a/fedn/fedn/utils/plugins/numpyarrayhelper.py b/fedn/fedn/utils/plugins/numpyarrayhelper.py new file mode 100644 index 000000000..9789bf541 --- /dev/null +++ b/fedn/fedn/utils/plugins/numpyarrayhelper.py @@ -0,0 +1,34 @@ +import tempfile + +import numpy as np + +from .helperbase import HelperBase + + +class Helper(HelperBase): + """ FEDn helper class for numpy arrays. """ + + def increment_average(self, model, model_next, n): + """ Update an incremental average. """ + return np.add(model, (model_next - model) / n) + + def save(self, model, path=None): + """Serialize weights/parameters to file. + + :param model: + :param path: + :return: + """ + if not path: + _, path = tempfile.mkstemp() + np.savetxt(path, model) + return path + + def load(self, path): + """Load weights/parameters from file or filelike. + + :param path: + :return: + """ + model = np.loadtxt(path) + return model diff --git a/fedn/fedn/utils/plugins/pytorchhelper.py b/fedn/fedn/utils/plugins/pytorchhelper.py new file mode 100644 index 000000000..d1ce79717 --- /dev/null +++ b/fedn/fedn/utils/plugins/pytorchhelper.py @@ -0,0 +1,62 @@ +from collections import OrderedDict + +import numpy as np + +from .helperbase import HelperBase + + +class Helper(HelperBase): + """ FEDn helper class for pytorch. """ + + def __init__(self): + """ Initialize helper. """ + super().__init__() + self.name = "pytorchhelper" + + def increment_average(self, model, model_next, num_examples, total_examples): + """ Update a weighted incremental average of model weights. + + :param model: Current model weights with keys from torch state_dict. 
+ :type model: OrderedDict + :param model_next: New model weights with keys from torch state_dict. + :type model_next: OrderedDict + :param num_examples: Number of examples in new model. + :type num_examples: int + :param total_examples: Total number of examples. + :type total_examples: int + :return: Incremental weighted average of model weights. + :rtype: OrderedDict + """ + w = OrderedDict() + for name in model.keys(): + tensorDiff = model_next[name] - model[name] + w[name] = model[name] + num_examples*tensorDiff / total_examples + return w + + def save(self, model, path=None): + """ Serialize weights to file. The serialized model must be a single binary object. + + :param model: Weights of model with keys from torch state_dict. + :type model: OrderedDict + :param path: File path. + :type path: str + :return: Path to file (generated as tmp file unless path is set). + :rtype: str + """ + if not path: + path = self.get_tmp_path() + np.savez_compressed(path, **model) + return path + + def load(self, path): + """ Load weights from file or filelike. + + :param path: file path, filehandle, filelike. + :type path: str + :return: Weights of model with keys from torch state_dict. + """ + a = np.load(path) + weights_np = OrderedDict() + for i in a.files: + weights_np[i] = a[i] + return weights_np diff --git a/fedn/fedn/utils/plugins/tests/test_kerashelper.py b/fedn/fedn/utils/plugins/tests/test_kerashelper.py new file mode 100644 index 000000000..5e392b47c --- /dev/null +++ b/fedn/fedn/utils/plugins/tests/test_kerashelper.py @@ -0,0 +1,94 @@ +import os +import unittest + +import numpy as np + +from fedn.utils.plugins.kerashelper import Helper as KerasHelper + + +class TestKerasHelper(unittest.TestCase): + """Test the KerasHelper class.""" + + def setUp(self): + self.helper = KerasHelper() + + def test_increment_average(self): + """Test the increment_average method.""" + # Test with a list + model = [1, 2, 3] + model_next = [4, 5, 6] + a = 10 + W = 20 + + result = self.helper.increment_average(model, model_next, a, W) + + self.assertEqual(result, [2.5, 3.5, 4.5]) + + # Test with a numpy array + model = np.array([1, 2, 3]) + model_next = np.array([4, 5, 6]) + + result = self.helper.increment_average(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([2.5, 3.5, 4.5])) + + # test with a list of numpy arrays + model = [np.array([1, 2, 3])] + model_next = [np.array([4, 5, 6])] + + result = self.helper.increment_average(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([[2.5, 3.5, 4.5]])) + + def test_increment_average_add(self): + """Test the increment_average_add method.""" + model = [1, 2, 3] + model_next = [4, 5, 6] + a = 10 + W = 20 + + result = self.helper.increment_average_add(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([2.5, 3.5, 4.5])) + + # Test with a numpy array + model = np.array([1, 2, 3]) + model_next = np.array([4, 5, 6]) + + result = self.helper.increment_average_add(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([2.5, 3.5, 4.5])) + + # test with a list of numpy arrays + model = [np.array([1, 2, 3])] + model_next = [np.array([4, 5, 6])] + + result = self.helper.increment_average_add(model, model_next, a, W) + + np.testing.assert_array_equal(result, np.array([[2.5, 3.5, 4.5]])) + + def test_save(self): + """Test the save method.""" + weights = [1, 2, 3] + + result = self.helper.save(weights, 'test.npz') + + self.assertEqual(result, 'test.npz') + + def test_load(self): + """Test 
the load method.""" + weights = [1, 2, 3] + + result = self.helper.save(weights, 'test.npz') + result = self.helper.load('test.npz') + + self.assertEqual(result, [1, 2, 3]) + + # Tear down method, remove test.npz + def tearDown(self): + if os.path.exists('test.npz'): + os.remove('test.npz') + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/utils/plugins/tests/test_pytorchhelper.py b/fedn/fedn/utils/plugins/tests/test_pytorchhelper.py new file mode 100644 index 000000000..4eb98c7f9 --- /dev/null +++ b/fedn/fedn/utils/plugins/tests/test_pytorchhelper.py @@ -0,0 +1,63 @@ +import os +import unittest + +import numpy as np + +from fedn.utils.plugins.pytorchhelper import Helper as PyTorchHelper + + +class TestPyTorchHelper(unittest.TestCase): + """Test the PyTorchHelper class.""" + + def setUp(self): + self.helper = PyTorchHelper() + + def test_increment_average(self): + """Test the increment_average method. The weights are stored as OrderedDicts.""" + + # Model as OrderedDict with keys as torch layers and values as numpy arrays + model = {'layer1': np.array([1, 2, 3])} + model_next = {'layer1': np.array([4, 5, 6])} + a = 10 + W = 20 + + result = self.helper.increment_average(model, model_next, a, W) + + # Check OrderedDict values match + np.testing.assert_array_equal(result['layer1'], np.array([2.5, 3.5, 4.5])) + + # Model as OrderedDict with keys as torch layers and values as lists + model = {'layer1': [1, 2, 3]} + model_next = {'layer1': [4, 5, 6]} + a = 10 + W = 20 + + # Catch TypeError: unsupported operand type(s) for -: 'list' and 'list' + with self.assertRaises(TypeError): + result = self.helper.increment_average(model, model_next, a, W) + + # Test save and load methods + def test_save_load(self): + """Test the save and load methods.""" + + # Create a model + model = {'layer1': np.array([1, 2, 3])} + + # Save the model + self.helper.save(model, 'test_model') + + # Check if the model file exists + self.assertTrue(os.path.exists('test_model.npz')) + + # Load the model + result = self.helper.load('test_model.npz') + + # Check OrderedDict values match + np.testing.assert_array_equal(result['layer1'], np.array([1, 2, 3])) + + # Remove the model file + os.remove('test_model.npz') + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/fedn/utils/pytorchhelper.py b/fedn/fedn/utils/pytorchhelper.py deleted file mode 100644 index fe1330b17..000000000 --- a/fedn/fedn/utils/pytorchhelper.py +++ /dev/null @@ -1,77 +0,0 @@ -import os -import tempfile -from collections import OrderedDict -from io import BytesIO - -import numpy as np - -from .helpers import HelperBase - - -class PytorchHelper(HelperBase): - - def increment_average(self, model, model_next, n): - """ Update an incremental average. """ - w = OrderedDict() - for name in model.keys(): - tensorDiff = model_next[name] - model[name] - w[name] = model[name] + tensorDiff / n - return w - - def get_tmp_path(self): - """ - - :return: - """ - fd, path = tempfile.mkstemp(suffix='.npz') - os.close(fd) - return path - - def save_model(self, weights_dict, path=None): - """ - - :param weights_dict: - :param path: - :return: - """ - if not path: - path = self.get_tmp_path() - np.savez_compressed(path, **weights_dict) - return path - - def load_model(self, path="weights.npz"): - """ - - :param path: - :return: - """ - b = np.load(path) - weights_np = OrderedDict() - for i in b.files: - weights_np[i] = b[i] - return weights_np - - def load_model_from_BytesIO(self, model_bytesio): - """ Load a model from a BytesIO object. 
""" - path = self.get_tmp_path() - with open(path, 'wb') as fh: - fh.write(model_bytesio) - fh.flush() - model = self.load_model(path) - os.unlink(path) - return model - - def serialize_model_to_BytesIO(self, model): - """ - - :param model: - :return: - """ - outfile_name = self.save_model(model) - - a = BytesIO() - a.seek(0, 0) - with open(outfile_name, 'rb') as f: - a.write(f.read()) - os.unlink(outfile_name) - return a diff --git a/fedn/fedn/utils/tests/test_helpers.py b/fedn/fedn/utils/tests/test_helpers.py new file mode 100644 index 000000000..9dfcdd36f --- /dev/null +++ b/fedn/fedn/utils/tests/test_helpers.py @@ -0,0 +1,57 @@ +import os +import unittest + +from fedn.utils.helpers import get_helper, save_metadata, save_metrics + + +class TestHelpers(unittest.TestCase): + + def test_get_helper(self): + helper = get_helper('pytorchhelper') + + # Check that helper is not None + self.assertTrue(helper is not None) + + # Check that helper nane is correct + self.assertTrue(helper.name == 'pytorchhelper') + + def test_save_metadata(self): + metadata = {'test': 'test'} + save_metadata(metadata, 'test') + + # Check that file exists + self.assertTrue(os.path.exists('test-metadata')) + + # Check that file is not empty + self.assertTrue(os.path.getsize('test-metadata') > 0) + + # Check that file contains the correct data + with open('test-metadata', 'r') as f: + data = f.read() + self.assertTrue(data == '{"test": "test"}') + + def test_save_metrics(self): + metrics = {'test': 'test'} + save_metrics(metrics, 'test_metrics.json') + + # Check that file exists + self.assertTrue(os.path.exists('test_metrics.json')) + + # Check that file is not empty + self.assertTrue(os.path.getsize('test_metrics.json') > 0) + + # Check that file contains the correct data + with open('test_metrics.json', 'r') as f: + data = f.read() + self.assertTrue(data == '{"test": "test"}') + + # Clean up (remove files) + def tearDown(self): + if os.path.exists('test-metadata'): + os.remove('test-metadata') + if os.path.exists('test_metrics.json'): + os.remove('test_metrics.json') + + +if __name__ == '__main__': + unittest.main() diff --git a/fedn/setup.py b/fedn/setup.py index 764b09170..93eea674d 100644 --- a/fedn/setup.py +++ b/fedn/setup.py @@ -2,7 +2,7 @@ setup( name='fedn', - version='0.4.1', + version='0.5.0', description="""Scaleout Federated Learning""", long_description=open('README.md').read(), long_description_content_type="text/markdown", @@ -17,9 +17,9 @@ "urllib3>=1.26.4", "minio", "python-slugify", - "grpcio~=1.47.0", + "grpcio~=1.48.0", "grpcio-tools", - "numpy>=1.21.6,<=1.22.2", + "numpy>=1.21.6", "protobuf", "pymongo", "Flask", @@ -47,5 +47,6 @@ 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', ], )