Skip to content

Commit

Permalink
Support help and required argument for the configs (#294)
Browse files Browse the repository at this point in the history
- support help and required arguments for the configs
- added de-arguments for compatibility with historical codes
  • Loading branch information
yxdyc authored Aug 11, 2022
1 parent f14d2c5 commit dd012a8
Show file tree
Hide file tree
Showing 17 changed files with 782 additions and 170 deletions.
18 changes: 18 additions & 0 deletions LICENSE
Original file line number Diff line number Diff line change
Expand Up @@ -335,6 +335,24 @@ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

------------------------------------------------------------------------------

The code in federatedscope/core/configs/yacs_config.py is derived from yacs,
which is licensed under the Apache License 2.0.

Copyright (c) 2018-present, Facebook, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

--------------------------------------------------------------------------------

Expand Down
2 changes: 1 addition & 1 deletion benchmark/FedHPOB/fedhpob/config.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import ConfigSpace as CS
from yacs.config import CfgNode as CN
from federatedscope.core.configs.config import CN
from fedhpob.benchmarks import TabularBenchmark
from fedhpob.benchmarks import RawBenchmark
from fedhpob.benchmarks import SurrogateBenchmark
Expand Down
2 changes: 1 addition & 1 deletion federatedscope/attack/auxiliary/attack_trainer_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ def wrap_attacker_trainer(base_trainer, config):
Args:
base_trainer (core.trainers.GeneralTorchTrainer): the trainer that
will be wrapped;
config (yacs.config.CfgNode): the configure;
config (federatedscope.core.configs.config.CN): the configure;
:returns:
The wrapped trainer; Type: core.trainers.GeneralTorchTrainer
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -328,7 +328,7 @@ def __init__(self,
grad_clip=self._cfg.grad.grad_clip,
dataset_name=self._cfg.data.type,
fl_local_update_num=self._cfg.train.local_update_steps,
fl_type_optimizer=self._cfg.fedopt.optimizer.type,
fl_type_optimizer=self._cfg.train.optimizer.type,
fl_lr=self._cfg.train.optimizer.lr,
batch_size=100)

Expand Down
8 changes: 4 additions & 4 deletions federatedscope/autotune/algos.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
import math

import ConfigSpace as CS
from yacs.config import CfgNode as CN
import yaml
import numpy as np

Expand Down Expand Up @@ -68,7 +67,7 @@ def run(self):
def get_scheduler(init_cfg):
"""To instantiate a scheduler object for conducting HPO
Arguments:
init_cfg (yacs.Node): configuration.
init_cfg (federatedscope.core.configs.config.CN): configuration.
"""

if init_cfg.hpo.scheduler == 'rs':
Expand All @@ -88,8 +87,9 @@ class Scheduler(object):
def __init__(self, cfg):
"""
Arguments:
cfg (yacs.Node): dict like object, where each key-value pair
corresponds to a field and its choices.
cfg (federatedscope.core.configs.config.CN): dict like object,
where each key-value pair corresponds to a field and its
choices.
"""

self._cfg = cfg
Expand Down
14 changes: 12 additions & 2 deletions federatedscope/core/auxiliaries/optimizer_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,17 +3,27 @@
except ImportError:
torch = None

import copy


def get_optimizer(model, type, lr, **kwargs):
    """Instantiate a ``torch.optim`` optimizer by name.

    Args:
        model: either a ``torch.nn.Module`` (its ``.parameters()`` are
            optimized) or an iterable of parameters/parameter groups that
            is forwarded to the optimizer directly.
        type (str): name of an optimizer class in ``torch.optim``
            (e.g. ``'SGD'``, ``'Adam'``). Note: shadows the builtin
            ``type``; kept for backward compatibility with callers that
            pass it by keyword.
        lr (float): learning rate passed as the second positional argument.
        **kwargs: extra constructor arguments (e.g. ``momentum``,
            ``weight_decay``). Config-framework bookkeeping entries are
            stripped before the call (see below).

    Returns:
        A constructed optimizer instance, or ``None`` when torch is not
        installed.

    Raises:
        NotImplementedError: if ``type`` does not name a ``torch.optim``
            attribute (also raised for non-``str`` ``type`` values, which
            fall through to the same error path in the original code's
            intent; here we keep the original behavior of raising only
            for unknown string names).
    """
    if torch is None:
        # torch failed to import at module load; silently signal "no optimizer".
        return None
    # The cfg node may inject internal bookkeeping keys into kwargs when
    # users have not called cfg.freeze(); drop them so torch.optim
    # constructors do not receive unexpected keyword arguments.
    tmp_kwargs = copy.deepcopy(kwargs)
    for internal_key in ('__help_info__', '__cfg_check_funcs__',
                         'is_ready_for_run'):
        tmp_kwargs.pop(internal_key, None)
    if isinstance(type, str):
        if hasattr(torch.optim, type):
            if isinstance(model, torch.nn.Module):
                return getattr(torch.optim, type)(model.parameters(), lr,
                                                  **tmp_kwargs)
            else:
                return getattr(torch.optim, type)(model, lr, **tmp_kwargs)
        else:
            raise NotImplementedError(
                'Optimizer {} not implement'.format(type))
Expand Down
2 changes: 1 addition & 1 deletion federatedscope/core/auxiliaries/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ def init_wandb(cfg):
tmp_cfg = copy.deepcopy(cfg)
if tmp_cfg.is_frozen():
tmp_cfg.defrost()
tmp_cfg.cfg_check_funcs.clear(
tmp_cfg.clear_check_funcs(
) # in most cases, no need to save the cfg_check_funcs via wandb
import yaml
cfg_yaml = yaml.safe_load(tmp_cfg.dump())
Expand Down
24 changes: 21 additions & 3 deletions federatedscope/core/cmd_args.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
import argparse
import sys
from federatedscope.core.configs.config import global_cfg


def parse_args(args=None):
parser = argparse.ArgumentParser(description='FederatedScope')
parser = argparse.ArgumentParser(description='FederatedScope',
add_help=False)
parser.add_argument('--cfg',
dest='cfg_file',
help='Config file path',
Expand All @@ -15,12 +17,28 @@ def parse_args(args=None):
required=False,
default=None,
type=str)
parser.add_argument(
'--help',
nargs="?",
default="all",
)
parser.add_argument('opts',
help='See federatedscope/core/configs for all options',
default=None,
nargs=argparse.REMAINDER)
if len(sys.argv) == 1:
parse_res = parser.parse_args(args)
init_cfg = global_cfg.clone()
# when users type only "main.py" or "main.py help"
if len(sys.argv) == 1 or parse_res.help == "all":
parser.print_help()
init_cfg.print_help()
sys.exit(1)
elif hasattr(parse_res, "help") and isinstance(parse_res.help, str):
init_cfg.print_help(parse_res.help)
sys.exit(1)
elif hasattr(parse_res, "help") and isinstance(parse_res.help, list):
for query in parse_res.help:
init_cfg.print_help(query)
sys.exit(1)

return parser.parse_args(args)
return parse_res
11 changes: 7 additions & 4 deletions federatedscope/core/configs/cfg_fl_algo.py
Original file line number Diff line number Diff line change
@@ -1,21 +1,24 @@
from federatedscope.core.configs.config import CN
from federatedscope.core.configs.yacs_config import Argument
from federatedscope.register import register_config


def extend_fl_algo_cfg(cfg):
# ---------------------------------------------------------------------- #
# fedopt related options, general fl
# fedopt related options, a general fl algorithm
# ---------------------------------------------------------------------- #
cfg.fedopt = CN()

cfg.fedopt.use = False

cfg.fedopt.optimizer = CN(new_allowed=True)
cfg.fedopt.optimizer.type = 'SGD'
cfg.fedopt.optimizer.lr = 0.01
cfg.fedopt.optimizer.type = Argument(
'SGD', description="optimizer type for FedOPT")
cfg.fedopt.optimizer.lr = Argument(
0.01, description="learning rate for FedOPT optimizer")

# ---------------------------------------------------------------------- #
# fedprox related options, general fl
# fedprox related options, a general fl algorithm
# ---------------------------------------------------------------------- #
cfg.fedprox = CN()

Expand Down
Loading

0 comments on commit dd012a8

Please sign in to comment.