Commit

Move docs from __init__ to right after class definition (adap#2692)
Co-authored-by: Charles Beauville <[email protected]>
adam-narozniak and charlesbvll authored Dec 8, 2023
1 parent dbf56d9 commit 1804a6a
Showing 11 changed files with 431 additions and 440 deletions.
72 changes: 36 additions & 36 deletions src/py/flwr/server/strategy/bulyan.py
@@ -38,10 +38,43 @@


# flake8: noqa: E501
# pylint: disable=line-too-long
class Bulyan(FedAvg):
"""Bulyan strategy implementation."""

# pylint: disable=too-many-arguments,too-many-instance-attributes,line-too-long, too-many-locals
"""Bulyan strategy.
Implementation based on https://arxiv.org/abs/1802.07927.
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
num_malicious_clients : int, optional
Number of malicious clients in the system. Defaults to 0.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]], Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters, optional
Initial global model parameters.
first_aggregation_rule : Callable
Byzantine-resilient aggregation rule used as the first step of Bulyan (e.g., Krum).
**aggregation_rule_kwargs : Any
Arguments passed to the first_aggregation_rule.
"""

# pylint: disable=too-many-arguments,too-many-instance-attributes,too-many-locals
def __init__(
self,
*,
@@ -66,39 +99,6 @@ def __init__(
first_aggregation_rule: Callable = aggregate_krum, # type: ignore
**aggregation_rule_kwargs: Any,
) -> None:
"""Bulyan strategy.
Implementation based on https://arxiv.org/abs/1802.07927.
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
num_malicious_clients : int, optional
Number of malicious clients in the system. Defaults to 0.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]], Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters, optional
Initial global model parameters.
first_aggregation_rule : Callable
Byzantine-resilient aggregation rule used as the first step of Bulyan (e.g., Krum).
**aggregation_rule_kwargs : Any
Arguments passed to the first_aggregation_rule.
"""
super().__init__(
fraction_fit=fraction_fit,
fraction_evaluate=fraction_evaluate,
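
A minimal usage sketch of the Bulyan strategy documented above, under assumed conditions: eight available clients, one malicious client, and placeholder server address and round count; first_aggregation_rule is left at its Krum default.

# Minimal sketch; client counts, malicious-client count, address, and rounds are assumptions.
import flwr as fl
from flwr.server.strategy import Bulyan

strategy = Bulyan(
    fraction_fit=1.0,
    min_fit_clients=8,
    min_available_clients=8,
    num_malicious_clients=1,  # Byzantine clients the aggregation should tolerate
    # first_aggregation_rule is left at its default (Krum).
)

# Hand the strategy to a Flower server (address and round count are placeholders).
fl.server.start_server(
    server_address="0.0.0.0:8080",
    config=fl.server.ServerConfig(num_rounds=3),
    strategy=strategy,
)
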
75 changes: 36 additions & 39 deletions src/py/flwr/server/strategy/fedadagrad.py
@@ -38,13 +38,47 @@
from .fedopt import FedOpt


# pylint: disable=line-too-long
class FedAdagrad(FedOpt):
"""FedAdagrad strategy - Adaptive Federated Optimization using Adagrad.
Paper: https://arxiv.org/abs/2003.00295
Implementation based on https://arxiv.org/abs/2003.00295v5
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]],Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
fit_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
evaluate_metrics_aggregation_fn: Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters
Initial global model parameters.
eta : float, optional
Server-side learning rate. Defaults to 1e-1.
eta_l : float, optional
Client-side learning rate. Defaults to 1e-1.
tau : float, optional
Controls the algorithm's degree of adaptability. Defaults to 1e-9.
"""

# pylint: disable=too-many-arguments,too-many-locals,too-many-instance-attributes, line-too-long
# pylint: disable=too-many-arguments,too-many-locals,too-many-instance-attributes
def __init__(
self,
*,
@@ -69,43 +103,6 @@ def __init__(
eta_l: float = 1e-1,
tau: float = 1e-9,
) -> None:
"""Federated learning strategy using Adagrad on server-side.
Implementation based on https://arxiv.org/abs/2003.00295v5
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]], Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
fit_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
evaluate_metrics_aggregation_fn: Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters
Initial global model parameters.
eta : float, optional
Server-side learning rate. Defaults to 1e-1.
eta_l : float, optional
Client-side learning rate. Defaults to 1e-1.
tau : float, optional
Controls the algorithm's degree of adaptability. Defaults to 1e-9.
"""
super().__init__(
fraction_fit=fraction_fit,
fraction_evaluate=fraction_evaluate,
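
A comparable sketch for FedAdagrad, assuming a dummy two-array weight list for the required initial_parameters and the default hyperparameter values listed in the docstring.

# Illustrative sketch; the zero-initialized arrays stand in for a real model's weights.
import numpy as np
import flwr as fl
from flwr.server.strategy import FedAdagrad

initial_ndarrays = [np.zeros((10, 5), dtype=np.float32), np.zeros(5, dtype=np.float32)]

strategy = FedAdagrad(
    # FedOpt-based strategies require initial global parameters.
    initial_parameters=fl.common.ndarrays_to_parameters(initial_ndarrays),
    eta=1e-1,    # server-side learning rate
    eta_l=1e-1,  # client-side learning rate
    tau=1e-9,    # degree of adaptability
)
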
83 changes: 40 additions & 43 deletions src/py/flwr/server/strategy/fedadam.py
@@ -38,13 +38,51 @@
from .fedopt import FedOpt


# pylint: disable=line-too-long
class FedAdam(FedOpt):
"""FedAdam - Adaptive Federated Optimization using Adam.
Paper: https://arxiv.org/abs/2003.00295
Implementation based on https://arxiv.org/abs/2003.00295v5
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]],Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters
Initial global model parameters.
fit_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
evaluate_metrics_aggregation_fn: Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
eta : float, optional
Server-side learning rate. Defaults to 1e-1.
eta_l : float, optional
Client-side learning rate. Defaults to 1e-1.
beta_1 : float, optional
Momentum parameter. Defaults to 0.9.
beta_2 : float, optional
Second moment parameter. Defaults to 0.99.
tau : float, optional
Controls the algorithm's degree of adaptability. Defaults to 1e-9.
"""

# pylint: disable=too-many-arguments,too-many-instance-attributes,too-many-locals, line-too-long
# pylint: disable=too-many-arguments,too-many-instance-attributes,too-many-locals
def __init__(
self,
*,
@@ -71,47 +109,6 @@ def __init__(
beta_2: float = 0.99,
tau: float = 1e-9,
) -> None:
"""Federated learning strategy using Adagrad on server-side.
Implementation based on https://arxiv.org/abs/2003.00295v5
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]], Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters
Initial global model parameters.
fit_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
evaluate_metrics_aggregation_fn: Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
eta : float, optional
Server-side learning rate. Defaults to 1e-1.
eta_l : float, optional
Client-side learning rate. Defaults to 1e-1.
beta_1 : float, optional
Momentum parameter. Defaults to 0.9.
beta_2 : float, optional
Second moment parameter. Defaults to 0.99.
tau : float, optional
Controls the algorithm's degree of adaptability. Defaults to 1e-9.
"""
super().__init__(
fraction_fit=fraction_fit,
fraction_evaluate=fraction_evaluate,
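
A sketch for FedAdam along the same lines, additionally assuming a hypothetical fit_config helper for the documented on_fit_config_fn hook; the config keys and dummy weights are illustrative.

# Illustrative sketch; fit_config and its keys are hypothetical, as are the dummy weights.
import numpy as np
import flwr as fl
from flwr.server.strategy import FedAdam


def fit_config(server_round: int):
    # Sent to every client selected for training in the given round.
    return {"server_round": server_round, "local_epochs": 1}


initial_ndarrays = [np.zeros((10, 5), dtype=np.float32), np.zeros(5, dtype=np.float32)]

strategy = FedAdam(
    initial_parameters=fl.common.ndarrays_to_parameters(initial_ndarrays),
    on_fit_config_fn=fit_config,
    eta=1e-1,
    beta_1=0.9,   # first-moment decay
    beta_2=0.99,  # second-moment decay
    tau=1e-9,
)
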
72 changes: 36 additions & 36 deletions src/py/flwr/server/strategy/fedavg.py
@@ -48,8 +48,43 @@
"""


# pylint: disable=line-too-long
class FedAvg(Strategy):
"""Configurable FedAvg strategy implementation."""
"""Federated Averaging strategy.
Implementation based on https://arxiv.org/abs/1602.05629
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. In case `min_fit_clients`
is larger than `fraction_fit * available_clients`, `min_fit_clients`
will still be sampled. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. In case `min_evaluate_clients`
is larger than `fraction_evaluate * available_clients`,
`min_evaluate_clients` will still be sampled. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]],Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters, optional
Initial global model parameters.
fit_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
evaluate_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
"""

# pylint: disable=too-many-arguments,too-many-instance-attributes, line-too-long
def __init__(
@@ -73,41 +108,6 @@ def __init__(
fit_metrics_aggregation_fn: Optional[MetricsAggregationFn] = None,
evaluate_metrics_aggregation_fn: Optional[MetricsAggregationFn] = None,
) -> None:
"""Federated Averaging strategy.
Implementation based on https://arxiv.org/abs/1602.05629
Parameters
----------
fraction_fit : float, optional
Fraction of clients used during training. In case `min_fit_clients`
is larger than `fraction_fit * available_clients`, `min_fit_clients`
will still be sampled. Defaults to 1.0.
fraction_evaluate : float, optional
Fraction of clients used during validation. In case `min_evaluate_clients`
is larger than `fraction_evaluate * available_clients`,
`min_evaluate_clients` will still be sampled. Defaults to 1.0.
min_fit_clients : int, optional
Minimum number of clients used during training. Defaults to 2.
min_evaluate_clients : int, optional
Minimum number of clients used during validation. Defaults to 2.
min_available_clients : int, optional
Minimum number of total clients in the system. Defaults to 2.
evaluate_fn : Optional[Callable[[int, NDArrays, Dict[str, Scalar]], Optional[Tuple[float, Dict[str, Scalar]]]]]
Optional function used for validation. Defaults to None.
on_fit_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure training. Defaults to None.
on_evaluate_config_fn : Callable[[int], Dict[str, Scalar]], optional
Function used to configure validation. Defaults to None.
accept_failures : bool, optional
Whether or not to accept rounds containing failures. Defaults to True.
initial_parameters : Parameters, optional
Initial global model parameters.
fit_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
evaluate_metrics_aggregation_fn : Optional[MetricsAggregationFn]
Metrics aggregation function, optional.
"""
super().__init__()

if (
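
Finally, a sketch for FedAvg exercising the documented evaluate_metrics_aggregation_fn hook, assuming clients report an "accuracy" metric alongside their example counts.

# Illustrative sketch; assumes clients return an "accuracy" entry from evaluate().
from typing import List, Tuple

from flwr.common import Metrics
from flwr.server.strategy import FedAvg


def weighted_average(metrics: List[Tuple[int, Metrics]]) -> Metrics:
    # Weight each client's accuracy by the number of examples it evaluated on.
    total_examples = sum(num_examples for num_examples, _ in metrics)
    accuracy = sum(num_examples * m["accuracy"] for num_examples, m in metrics) / total_examples
    return {"accuracy": accuracy}


strategy = FedAvg(
    fraction_evaluate=1.0,
    min_evaluate_clients=2,
    min_available_clients=2,
    evaluate_metrics_aggregation_fn=weighted_average,
)
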