Skip to content

Commit

Permalink
Clean up documentation and remove Singleton meta-class
Browse files Browse the repository at this point in the history
  • Loading branch information
JMGaljaard committed Sep 16, 2022
1 parent de6966b commit 4e451db
Show file tree
Hide file tree
Showing 18 changed files with 408 additions and 301 deletions.
22 changes: 1 addition & 21 deletions fltk/datasets/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,24 +2,4 @@
from fltk.datasets.cifar100 import CIFAR100Dataset
from fltk.datasets.fashion_mnist import FashionMNISTDataset
from fltk.datasets.mnist import MNIST
from fltk.util.config.definitions import Dataset


def get_train_loader_path(name: Dataset) -> str:
    """Resolve the on-disk path of the pickled train DataLoader for a dataset.

    Args:
        name (Dataset): Dataset definition to look up.

    Returns:
        str: Relative path to the pickled train data loader.

    Raises:
        KeyError: If `name` is not one of the supported datasets.
    """
    folder_by_dataset = {
        Dataset.cifar10: 'cifar10',
        Dataset.fashion_mnist: 'fashion-mnist',
        Dataset.cifar100: 'cifar100',
        Dataset.mnist: 'mnist',
    }
    return f'data_loaders/{folder_by_dataset[name]}/train_data_loader.pickle'


def get_test_loader_path(name: Dataset) -> str:
    """Resolve the on-disk path of the pickled test DataLoader for a dataset.

    Args:
        name (Dataset): Dataset definition to look up.

    Returns:
        str: Relative path to the pickled test data loader.

    Raises:
        KeyError: If `name` is not one of the supported datasets.
    """
    folder_by_dataset = {
        Dataset.cifar10: 'cifar10',
        Dataset.fashion_mnist: 'fashion-mnist',
        Dataset.cifar100: 'cifar100',
        Dataset.mnist: 'mnist',
    }
    return f'data_loaders/{folder_by_dataset[name]}/test_data_loader.pickle'
from fltk.datasets.dataset import Dataset
4 changes: 2 additions & 2 deletions fltk/util/cluster/client.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import abc
import logging
import time
from collections import defaultdict
Expand All @@ -16,7 +17,6 @@
V1VolumeMount, V1Toleration, V1Volume, V1PersistentVolumeClaimVolumeSource, V1ConfigMapVolumeSource

from fltk.util.cluster.conversion import Convert
from fltk.util.singleton import Singleton
from fltk.util.task.arrival_task import DistributedArrivalTask, ArrivalTask, FederatedArrivalTask

if TYPE_CHECKING:
Expand Down Expand Up @@ -164,7 +164,7 @@ def __monitor_pods(self) -> None:
self._logger.debug(self._resource_lookup)


class ClusterManager(metaclass=Singleton):
class ClusterManager(abc.ABC):
"""
Object with basic monitoring functionality. This shows how the information of different Pods in a cluster can be
requested and parsed. Currently, it mainly exists to start the ResourceWatchDog, which now only keeps track of the
Expand Down
30 changes: 28 additions & 2 deletions fltk/util/config/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,17 @@


def retrieve_config_network_params(conf: FedLearnerConfig, nic=None, host=None):
"""
Args:
conf: FedLearnerConfig:
nic: (Default value = None)
host: (Default value = None)
Returns:
str: NIC to use.
str: host to use.
"""
if hasattr(conf, 'system'):
system_attr = getattr(conf, 'system')
if 'federator' in system_attr:
Expand All @@ -22,6 +33,15 @@ def retrieve_config_network_params(conf: FedLearnerConfig, nic=None, host=None):


def get_distributed_config(args, alt_path: str = None) -> Optional[DistributedConfig]:
"""
Args:
args:
alt_path: str: (Default value = None)
Returns:
Optional[DistributedConfig]: When provided, DistributedConfig from Path specified during startup.
"""
if args:
config_path = args.config
else:
Expand All @@ -39,8 +59,14 @@ def get_distributed_config(args, alt_path: str = None) -> Optional[DistributedCo


def get_learning_param_config(args, alt_path: str = None) -> Optional[DistLearnerConfig]:
"""
Retrieve learning parameter configuration from Disk for distributed learning experiments.
"""Retrieve learning parameter configuration from Disk for distributed learning experiments.
Args:
args:
alt_path: str: (Default value = None)
Returns:
"""
if args:
config_path = args.experiment_config
Expand Down
140 changes: 81 additions & 59 deletions fltk/util/config/arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,25 +3,30 @@
import torch.distributed as dist


def _create_extractor_parser(subparsers):
def _create_extractor_parser(subparsers) -> None:
"""
Helper function to add extractor arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
Args:
subparsers (Any): Subparser to add arguments to.
Returns:
None
"""
extractor_parser = subparsers.add_parser('extractor')
extractor_parser.add_argument('config', type=str)


def _create_client_parser(subparsers) -> None:
"""
Helper function to add client arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add client arguments.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None
"""
client_parser = subparsers.add_parser('client')
client_parser.add_argument('experiment_config', type=str, help="Experiment specific config (yaml).")
Expand All @@ -34,12 +39,14 @@ def _create_client_parser(subparsers) -> None:


def _create_cluster_parser(subparsers) -> None:
"""
Helper function to add cluster execution arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add cluster execution arguments.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None
"""
cluster_parser = subparsers.add_parser('cluster')
cluster_parser.add_argument('config', type=str)
Expand All @@ -48,50 +55,58 @@ def _create_cluster_parser(subparsers) -> None:


def _create_container_util_parser(subparsers) -> None:
"""
Helper function to add container util execution arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add container util execution arguments.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None
"""
util_docker_parser = subparsers.add_parser('util-docker')
util_docker_parser.add_argument('name', type=str)
util_docker_parser.add_argument('--clients', type=int)


def _create_util_parser(subparsers):
"""
Helper function to add util generation execution arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add util generation execution arguments.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None: None
"""
util_generate_parser = subparsers.add_parser('util-generate')
util_generate_parser.add_argument('path', type=str)


def _create_util_run_parser(subparsers) -> None:
"""
Helper function to add util run execution arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add util run execution arguments.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None: None
"""
util_run_parser = subparsers.add_parser('util-run')
util_run_parser.add_argument('path', type=str)


def _create_remote_parser(subparsers) -> None:
"""
Helper function to add remote Federated Learning execution arguments. Supports both Docker and K8s execution
"""Helper function to add remote Federated Learning execution arguments. Supports both Docker and K8s execution
using optional (positional) arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None: None
"""
remote_parser = subparsers.add_parser('remote')
add_default_arguments(remote_parser)
Expand All @@ -102,37 +117,44 @@ def _create_remote_parser(subparsers) -> None:


def _create_single_parser(subparsers) -> None:
"""
Helper function to add Local single machine execution arguments.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add Local single machine execution arguments.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
None: None
"""
single_machine_parser = subparsers.add_parser('single')
add_default_arguments(single_machine_parser)


def add_default_arguments(*parsers):
"""
Helper function to add default arguments shared between executions.
@param parsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: None
"""Helper function to add default arguments shared between executions.
Args:
parsers: Subparser to add arguments to.
*parsers:
Returns:
None: None
"""
for parser in parsers:
parser.add_argument('config', type=str, help='')
parser.add_argument('--prefix', type=str, default=None)


def create_all_subparsers(subparsers):
"""
Helper function to add all subparsers to an argparse object.
@param subparsers: Subparser to add arguments to.
@type subparsers: Any
@return: None
@rtype: ArgumentParser
"""Helper function to add all subparsers to an argparse object.
Args:
subparsers(Any): Subparser to add arguments to.
Returns:
ArgumentParser: None
"""
_create_extractor_parser(subparsers)
_create_client_parser(subparsers)
Expand Down
1 change: 1 addition & 0 deletions fltk/util/config/definitions/aggregate.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

@unique
class Aggregations(Enum):
    """Enum of the provided parameter-aggregation strategies (e.g. for federated averaging)."""
    avg = 'Avg'
    fedavg = 'FedAvg'
    sum = 'Sum'
1 change: 1 addition & 0 deletions fltk/util/config/definitions/data_sampler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

@unique
class DataSampler(Enum):
"""Enum for provided datasampler (Federated) Types."""
uniform = "uniform"
q_sampler = "q sampler"
limit_labels = "limit labels"
Expand Down
32 changes: 23 additions & 9 deletions fltk/util/config/definitions/dataset.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,44 @@
from typing import Type

from aenum import unique, Enum
from re import T

from fltk import datasets
from fltk.datasets import CIFAR10Dataset, CIFAR100Dataset, FashionMNISTDataset, MNIST


@unique
class Dataset(Enum):
"""Enum for provided dataset Types."""
cifar10 = 'cifar10'
cifar100 = 'cifar100'
fashion_mnist = 'fashion-mnist'
mnist = 'mnist'

@classmethod
def _missing_name_(cls, name: str) -> T:
def _missing_name_(cls, name: str) -> "Dataset":
"""Helper function in case name could not be looked up (to support older configurations).
Args:
name (str): Name of Type to be looked up.
Returns:
Dataset: Corresponding Enum instance, if name is recognized from lower case.
"""
for member in cls:
if member.name.lower() == name.lower():
return member


def get_dist_dataset(name: Dataset):
"""
Function to retrieve distributed dataset (Distributed Learning Experiment).
@param name: Definition name of the dataset.
@type name: Dataset
@return:
@rtype:
def get_dist_dataset(name: Dataset) -> Type[datasets.Dataset]:
"""Function to retrieve (distributed) dataset, for Distributed Learning Experiments.
Args:
name (Dataset): Definition (Enum) of the dataset configured.
Returns:
Type[datasets.Dataset]: Class reference to requested dataset.
"""
__lookup = {
Dataset.cifar10: CIFAR10Dataset,
Expand Down
1 change: 1 addition & 0 deletions fltk/util/config/definitions/experiment_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,6 @@


class ExperimentType(Enum):
    """Enum distinguishing the supported experiment deployment types: federated or distributed learning."""
    FEDERATED = 'federated'
    DISTRIBUTED = 'distributed'
1 change: 1 addition & 0 deletions fltk/util/config/definitions/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@


class LogLevel(Enum):
""" """
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
Expand Down
Loading

0 comments on commit 4e451db

Please sign in to comment.