Clean up config object loading and code duplication
JMGaljaard committed Sep 6, 2022
1 parent 58ad4f9 commit e90d136
Showing 1 changed file with 4 additions and 9 deletions.
fltk/util/config/__init__.py: 13 changes (4 additions & 9 deletions)
@@ -1,15 +1,10 @@
 from __future__ import annotations
 
-import json
 from pathlib import Path
 from typing import Optional, Union, Type, Dict
 
-import torch
 import yaml
 import logging
 
-from torch.nn.modules.loss import _Loss
-
-from fltk.util.config.definitions import Loss
 
 from fltk.util.config.distributed_config import DistributedConfig
@@ -45,15 +40,15 @@ def get_distributed_config(args, alt_path: str = None) -> Optional[DistributedConfig]:


 def get_learning_param_config(args, alt_path: str = None) -> Optional[DistLearningConfig]:
     """
     Retrieve learning parameter configuration from Disk for distributed learning experiments.
     """
     if args:
         config_path = args.experiment_config
     else:
         config_path = alt_path
-    safe_loader = get_safe_loader()
     try:
-        with open(config_path) as f:
-            learning_params_dict = yaml.load(f, Loader=safe_loader)
-            learning_params = DistLearningConfig.from_dict(learning_params_dict)
+        learning_params = DistLearningConfig.from_yaml(Path(config_path))
     except Exception as e:
         msg = f"Failed to get learning parameter configuration for distributed experiments: {e}"
         logging.info(msg)
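The new call site hands the whole job to `DistLearningConfig.from_yaml(Path(config_path))`, so the YAML parsing that used to be repeated inside each `get_*_config` helper now lives on the config class itself. Below is a minimal sketch of what such a `from_yaml` classmethod could look like; the field names, the `from_dict` body, and the local `get_safe_loader` stand-in are illustrative assumptions, not the repository's actual implementation.

```python
from __future__ import annotations

import logging
from dataclasses import dataclass, fields
from pathlib import Path

import yaml


def get_safe_loader() -> type:
    """Stand-in for the module's get_safe_loader(); returns a YAML SafeLoader class."""
    return yaml.SafeLoader


@dataclass
class DistLearningConfig:
    # Hypothetical fields, for illustration only; the real config defines its own schema.
    batch_size: int = 128
    learning_rate: float = 0.01
    epochs: int = 1

    @classmethod
    def from_dict(cls, config: dict) -> DistLearningConfig:
        """Build a config from a plain dict, ignoring keys the dataclass does not define."""
        known = {f.name for f in fields(cls)}
        return cls(**{k: v for k, v in config.items() if k in known})

    @classmethod
    def from_yaml(cls, path: Path) -> DistLearningConfig:
        """Open a YAML file, parse it with the safe loader, and delegate to from_dict,
        so every caller shares a single loading code path."""
        with open(path) as file:
            config_dict = yaml.load(file, Loader=get_safe_loader())
        return cls.from_dict(config_dict)


if __name__ == "__main__":
    # Callers now pass only a path; parsing details stay inside the config class.
    try:
        params = DistLearningConfig.from_yaml(Path("configs/example_learning_params.yaml"))
        print(params)
    except Exception as e:
        logging.info(f"Failed to get learning parameter configuration: {e}")
```

Read this way, dropping the `safe_loader`, `with open`, and `from_dict` plumbing from the helper matches the commit message: the duplicated loading code collapses into one shared path on the config object.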
