fixed some missing API references in fs.core (#54)
As the title says.
yxdyc authored May 6, 2022
1 parent 96c1237 commit c72a8a5
Showing 8 changed files with 32 additions and 33 deletions.
9 changes: 9 additions & 0 deletions federatedscope/core/configs/__init__.py
@@ -13,10 +13,19 @@
if "config" in all_sub_configs:
    all_sub_configs.remove('config')


from federatedscope.core.configs.config import CN, init_global_cfg
__all__ = __all__ + \
    [
        'CN',
        'init_global_cfg'
    ]

# reorder the config to ensure the base config will be registered first
base_configs = [
    'cfg_data', 'cfg_fl_setting', 'cfg_model', 'cfg_training', 'cfg_evaluation'
]
for base_config in base_configs:
    all_sub_configs.pop(all_sub_configs.index(base_config))
    all_sub_configs.insert(0, base_config)
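Note: a minimal sketch of what the re-exported names enable; the call pattern for init_global_cfg below is an assumption for illustration, not part of this diff.

# hypothetical usage sketch (not part of the commit)
from federatedscope.core.configs import CN, init_global_cfg  # now package-level exports

cfg = CN()            # config node re-exported above
init_global_cfg(cfg)  # assumed to populate cfg with the registered default options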

5 changes: 5 additions & 0 deletions federatedscope/core/monitors/__init__.py
@@ -0,0 +1,5 @@
from federatedscope.core.monitors.early_stopper import EarlyStopper
from federatedscope.core.monitors.metric_calculator import MetricCalculator
from federatedscope.core.monitors.monitor import Monitor

__all__ = ['EarlyStopper', 'MetricCalculator', 'Monitor']
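Note: with the new __init__.py above, the monitor classes can be imported from the package itself; a minimal sketch (illustrative only):

# package-level import enabled by this commit
from federatedscope.core.monitors import EarlyStopper, MetricCalculator, Monitor

# equivalent to importing from the concrete modules, e.g.
# from federatedscope.core.monitors.monitor import Monitor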
13 changes: 6 additions & 7 deletions federatedscope/core/trainers/__init__.py
@@ -1,15 +1,14 @@
from federatedscope.core.trainers.trainer import Trainer, GeneralTorchTrainer
from federatedscope.core.trainers.trainer_multi_model import GeneralMultiModelTrainer
from federatedscope.core.trainers.trainer_pFedMe import wrap_pFedMeTrainer
from federatedscope.core.trainers.trainer_Ditto import wrap_DittoTrainer
from federatedscope.core.trainers.trainer_FedEM import FedEMTrainer
from federatedscope.core.trainers.context import Context
from federatedscope.core.trainers.trainer_fedprox import wrap_fedprox_trainer
from federatedscope.core.trainers.trainer_nbafl import wrap_nbafl_trainer, wrap_nbafl_server

__all__ = [
'Trainer',
'Context',
'GeneralTorchTrainer',
'wrap_pFedMeTrainer',
'wrap_fedprox_trainer',
'wrap_nbafl_trainer',
'wrap_nbafl_server'
'Trainer', 'Context', 'GeneralTorchTrainer', 'GeneralMultiModelTrainer',
'wrap_pFedMeTrainer', 'wrap_DittoTrainer', 'FedEMTrainer',
'wrap_fedprox_trainer', 'wrap_nbafl_trainer', 'wrap_nbafl_server'
]
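Note: the trainers and wrappers added to __all__ are now reachable from the package root; a hedged sketch of the implied usage (the wrap_* call signature is an assumption, not shown in this diff):

# package-level imports of the newly exported names
from federatedscope.core.trainers import (GeneralMultiModelTrainer,
                                           wrap_DittoTrainer, FedEMTrainer)

# assumed wrapper pattern suggested by the wrap_* naming (arguments hypothetical):
# ditto_trainer = wrap_DittoTrainer(base_trainer)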
5 changes: 1 addition & 4 deletions federatedscope/mf/dataloader/__init__.py
@@ -1,6 +1,3 @@
from federatedscope.mf.dataloader.dataloader import load_mf_dataset, MFDataLoader

__all__ = [
'load_mf_dataset',
'MFDataLoader'
]
__all__ = ['load_mf_dataset', 'MFDataLoader']
13 changes: 3 additions & 10 deletions federatedscope/mf/dataset/__init__.py
@@ -1,13 +1,6 @@
from federatedscope.mf.dataset.movielens import *

__all__ = [
'VMFDataset',
'HMFDataset',
'MovieLensData',
'MovieLens1M',
'MovieLens10M',
'VFLMovieLens1M',
'HFLMovieLens1M',
'VFLMovieLens10M',
'HFLMovieLens10M'
]
'VMFDataset', 'HMFDataset', 'MovieLensData', 'MovieLens1M', 'MovieLens10M',
'VFLMovieLens1M', 'HFLMovieLens1M', 'VFLMovieLens10M', 'HFLMovieLens10M'
]
7 changes: 1 addition & 6 deletions federatedscope/mf/model/__init__.py
@@ -1,9 +1,4 @@
from federatedscope.mf.model.model import BasicMFNet, VMFNet, HMFNet
from federatedscope.mf.model.model_builder import get_mfnet

__all__ = [
"get_mfnet",
"BasicMFNet",
"VMFNet",
"HMFNet"
]
__all__ = ["get_mfnet", "BasicMFNet", "VMFNet", "HMFNet"]
7 changes: 2 additions & 5 deletions federatedscope/mf/trainer/__init__.py
@@ -2,9 +2,6 @@
from federatedscope.mf.trainer.trainer_sgdmf import wrap_MFTrainer, init_sgdmf_ctx, embedding_clip, hook_on_batch_backward

__all__ = [
'MFTrainer',
'wrap_MFTrainer',
'init_sgdmf_ctx',
'embedding_clip',
'MFTrainer', 'wrap_MFTrainer', 'init_sgdmf_ctx', 'embedding_clip',
'hook_on_batch_backward'
]
]
6 changes: 5 additions & 1 deletion scripts/parse_exp_results_wandb.py
@@ -95,7 +95,7 @@ def main():
if res['Role'] == 'Server #':
cur_round = res['Round']
res.pop('Role')
if cur_round != "Final":
if cur_round != "Final" and 'Results_raw' in res:
res.pop('Results_raw')

log_res = {}
@@ -110,6 +110,10 @@

else:
exp_stop_normal = True
if key == "Results_raw":
for final_type, final_type_dict in res["Results_raw"].items():
for inner_key, inner_val in final_type_dict.items():
log_res_best[f"{final_type}/{inner_key}"] = inner_val
# log_res_best = {}
# for best_res_type, val_dict in val.items():
# for key_inner, val_inner in val_dict.items():
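Note: for reference, a self-contained illustration of the flattening loop added above; the "client_best_individual" key and the metric names are made up for the example, not taken from FederatedScope:

# illustrative only: mimics the new Results_raw flattening with fake data
res = {"Results_raw": {"client_best_individual": {"test_acc": 0.91,
                                                  "test_avg_loss": 0.30}}}
log_res_best = {}
for final_type, final_type_dict in res["Results_raw"].items():
    for inner_key, inner_val in final_type_dict.items():
        log_res_best[f"{final_type}/{inner_key}"] = inner_val
# log_res_best -> {"client_best_individual/test_acc": 0.91,
#                  "client_best_individual/test_avg_loss": 0.30}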
