Skip to content

Commit

Permalink
Introduce initial changes to change default logging directory
Browse files Browse the repository at this point in the history
  • Loading branch information
JMGaljaard committed May 8, 2022
1 parent 9695e90 commit 0e29ba5
Show file tree
Hide file tree
Showing 4 changed files with 27 additions and 13 deletions.
10 changes: 3 additions & 7 deletions configs/federated_tasks/example_arrival_config.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
"memory": "1Gi"
},
"Worker": {
"cores": "750m",
"memory": "1Gi"
"cores": "2000m",
"memory": "2Gi"
}
}
},
Expand Down Expand Up @@ -64,11 +64,7 @@
},
"experimentConfiguration": {
"randomSeed": [
1,
41,
42,
43,
430
1
],
"workerReplication": {
"Master": 1,
Expand Down
1 change: 1 addition & 0 deletions experiments/node.jinja.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,4 @@ data_sampler_args: {{ task.get_sampler_args(tpe) }}
replication_id: {{ task.get_net_param('replication') }}
real_time: true
save_data_append: true
output_path: {{ experiment_path }}
26 changes: 22 additions & 4 deletions fltk/core/distributed/orchestrator.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,25 @@
__ENV = Environment(loader=FileSystemLoader(EXPERIMENT_DIR))


def _prepare_experiment_maps(task: FederatedArrivalTask, u_id, replication: int = 1) -> \
def _generate_experiment_path_name(task: ArrivalTask, u_id: str, config: DistributedConfig):
"""
Helper function to generate experiment name for logging without conflicts
@param task: Arrival task for Task realted information.
@type task: ArrivalTask
@param u_id: Unique identifier string corresponding to the experiment.
@type u_id: str
@param config: Distributed configuration for logging directory configuration.
@type config: DistributedConfig
@return: String representation of the logging path for a specific experiment.
@rtype: str
"""
log_dir = config.execution_config.log_path
experiment_name = f"{task.dataset}_{task.network}_{u_id}_{task.replication}"
full_path = f"{log_dir}/{experiment_name}"
return full_path


def _prepare_experiment_maps(task: FederatedArrivalTask, config: DistributedConfig, u_id: str, replication: int = 1) -> \
(OrderedDict[str, V1ConfigMap], OrderedDict[str, str]):
template = __ENV.get_template('node.jinja.yaml')
type_dict = collections.OrderedDict()
Expand All @@ -30,8 +48,8 @@ def _prepare_experiment_maps(task: FederatedArrivalTask, u_id, replication: int
name = str(f'{tpe}-{u_id}-{replication}').lower()
meta = V1ObjectMeta(name=name,
labels={'app.kubernetes.io/name': f"fltk.node.config.{tpe}"})
# TODO: Replication / seed information
filled_template = template.render(task=task, tpe=tpe, replication=replication)
exp_path = _generate_experiment_path_name(task, u_id, config)
filled_template = template.render(task=task, tpe=tpe, replication=replication, experiment_path=exp_path)
type_dict[tpe] = V1ConfigMap(data={'node.config.yaml': filled_template}, metadata=meta)
name_dict[tpe] = name
return type_dict, name_dict
Expand Down Expand Up @@ -171,7 +189,7 @@ def run_federated(self, clear: bool = True) -> None:
# Do blocking request to priority queue
curr_task = self.pending_tasks.get()
self.__logger.info(f"Scheduling arrival of Arrival: {curr_task.id}")
config_dict, configmap_name_dict = _prepare_experiment_maps(curr_task, curr_task.id, 1)
config_dict, configmap_name_dict = _prepare_experiment_maps(curr_task, self._config, curr_task.id, 1)
job_to_start = construct_job(self._config, curr_task, configmap_name_dict)

self.__create_config_maps(config_dict)
Expand Down
3 changes: 1 addition & 2 deletions fltk/util/config/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ class Config:
# Save data in append mode. Thereby flushing on every append to file.
# This could be useful when a system is likely to crash midway an experiment
save_data_append: bool = False
output_path: Path = Path('output_test_2')
output_path: Path = Path('logging')

def __init__(self, **kwargs) -> None:
enum_fields = [x for x in self.__dataclass_fields__.items() if isinstance(x[1].type, Enum) or isinstance(x[1].type, EnumMeta)]
Expand All @@ -90,7 +90,6 @@ def __init__(self, **kwargs) -> None:
self.output_path = Path(value)
self.update_rng_seed()


def update_rng_seed(self):
torch.manual_seed(self.rng_seed)

Expand Down

0 comments on commit 0e29ba5

Please sign in to comment.