
Commit

Defined pseudo_gradient
Andreas Hellander committed Jan 9, 2025
1 parent 0150ab9 commit ef184fa
Showing 1 changed file with 27 additions and 23 deletions.
50 changes: 27 additions & 23 deletions fedn/network/combiner/aggregators/fedopt.py
@@ -94,9 +94,9 @@ def combine_models(self, helper=None, delete_models=True, parameters=None):
             if key not in parameters:
                 parameters[key] = value
 
-        model = None
-        nr_aggregated_models = 0
-        total_examples = 0
+        # Aggregation initialization
+        model, pseudo_gradient = None, None
+        nr_aggregated_models, total_examples = 0, 0
 
         logger.info(
             "AGGREGATOR({}): Aggregating model updates... ".format(self.name))
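
For context: in FedOpt the pseudo gradient is the example-weighted average of the client deltas against the current server model, and this commit initializes it to None alongside the other accumulators so the "no updates aggregated" case is detectable later. Below is a minimal numpy sketch of that accumulation; the names (accumulate_pseudo_gradient, client_updates) are hypothetical stand-ins for FEDn's helper-based implementation, not its actual API.

import numpy as np

def accumulate_pseudo_gradient(model_old, client_updates, num_examples_per_client):
    """Illustrative sketch only, not FEDn's actual code.

    model_old:  current server weights, a list of numpy arrays.
    client_updates: list of client weight lists (same shapes as model_old).
    num_examples_per_client: training-set size reported by each client.
    """
    pseudo_gradient, total_examples = None, 0
    for model_next, num_examples in zip(client_updates, num_examples_per_client):
        total_examples += num_examples
        # Client delta relative to the current server model.
        delta = [w_new - w_old for w_new, w_old in zip(model_next, model_old)]
        if pseudo_gradient is None:
            pseudo_gradient = delta
        else:
            # Fold the new delta into a running example-weighted average.
            a = num_examples / total_examples
            pseudo_gradient = [(1 - a) * g + a * d
                               for g, d in zip(pseudo_gradient, delta)]
    # Stays None if no update was processed -- exactly the case the
    # commit guards against before taking a server optimizer step.
    return pseudo_gradient
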
@@ -139,30 +139,34 @@ def combine_models(self, helper=None, delete_models=True, parameters=None):
                         self.name, model_update.model_update_id))
             except Exception as e:
                 logger.error(
                     "AGGREGATOR({}): Error encountered while processing model update {}, skipping this update.".format(self.name, e))

-        try:
-            if parameters["serveropt"] == "adam":
-                model = self.serveropt_adam(
-                    helper, pseudo_gradient, model_old, parameters)
-            elif parameters["serveropt"] == "yogi":
-                model = self.serveropt_yogi(
-                    helper, pseudo_gradient, model_old, parameters)
-            elif parameters["serveropt"] == "adagrad":
-                model = self.serveropt_adagrad(
-                    helper, pseudo_gradient, model_old, parameters)
-            else:
-                logger.error("Unsupported server optimizer passed to FedOpt.")
-        except Exception as e:
-            tb = traceback.format_exc()
-            logger.error(
-                "AGGREGATOR({}): Error encountered while aggregating: {}".format(self.name, e))
-            logger.error(tb)
-            return None, data
+        if pseudo_gradient:
+            try:
+                if parameters["serveropt"] == "adam":
+                    model = self.serveropt_adam(
+                        helper, pseudo_gradient, model_old, parameters)
+                elif parameters["serveropt"] == "yogi":
+                    model = self.serveropt_yogi(
+                        helper, pseudo_gradient, model_old, parameters)
+                elif parameters["serveropt"] == "adagrad":
+                    model = self.serveropt_adagrad(
+                        helper, pseudo_gradient, model_old, parameters)
+                else:
+                    logger.error(
+                        "Unsupported server optimizer passed to FedOpt.")
+                    return None, data
+            except Exception as e:
+                tb = traceback.format_exc()
+                logger.error(
+                    "AGGREGATOR({}): Error encountered while aggregating: {}".format(self.name, e))
+                logger.error(tb)
+                return None, data
+        else:
+            return None, data

         data["nr_aggregated_models"] = nr_aggregated_models
 
         logger.info("AGGREGATOR({}): Aggregation completed, aggregated {} models.".format(
             self.name, nr_aggregated_models))
         return model, data
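
With the guard in place, a server optimizer step runs only when at least one client update actually produced a pseudo gradient; otherwise combine_models returns (None, data) straight away instead of failing on an undefined pseudo_gradient. For reference, here is a self-contained sketch of the Adam-style server step that serveropt_adam corresponds to in FedOpt (Reddi et al., "Adaptive Federated Optimization"); the function name, defaults, and state handling are illustrative assumptions, not FEDn's exact implementation.

import numpy as np

def serveropt_adam_sketch(model_old, pseudo_gradient, m, v,
                          learning_rate=1e-3, beta1=0.9, beta2=0.99, tau=1e-4):
    """One FedAdam server step over lists of numpy arrays (sketch only).

    model_old:       current server weights.
    pseudo_gradient: averaged client delta for this round.
    m, v:            first/second moment estimates, same shapes as the weights.
    """
    new_model, new_m, new_v = [], [], []
    for x, g, m_i, v_i in zip(model_old, pseudo_gradient, m, v):
        m_i = beta1 * m_i + (1.0 - beta1) * g               # first moment
        v_i = beta2 * v_i + (1.0 - beta2) * np.square(g)    # second moment
        x = x + learning_rate * m_i / (np.sqrt(v_i) + tau)  # server update
        new_model.append(x)
        new_m.append(m_i)
        new_v.append(v_i)
    return new_model, new_m, new_v

Yogi and Adagrad differ only in the second-moment update: Yogi uses v = v - (1 - beta2) * g**2 * sign(v - g**2), while Adagrad accumulates v = v + g**2; the parameter update itself is the same in all three variants.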
