From 95579d0998e29090bdc6bbca3b8bec6eca897216 Mon Sep 17 00:00:00 2001
From: Gabry
Date: Fri, 28 Jun 2024 14:23:18 +0200
Subject: [PATCH] fix_flake8

---
 micromind/core.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/micromind/core.py b/micromind/core.py
index 40e177c..53c64f6 100644
--- a/micromind/core.py
+++ b/micromind/core.py
@@ -32,7 +32,7 @@
     "lr": 0.001,  # this is ignored if you are overriding the configure_optimizers
     "debug": False,
     "log_wandb": False,
-    "wandb_resume": 'auto' # Resume run if prev crashed, otherwise new run. ["allow", "must", "never", "auto" or None]
+    "wandb_resume": "auto",  # ["allow", "must", "never", "auto" or None]
 }
 
 
@@ -384,7 +384,7 @@ def compute_macs(self, input_shape: Union[List, Tuple]):
 
     def on_train_start(self):
         """Initializes the optimizer, modules and puts the networks on the right
-        devices. Optionally loads checkpoint if already present. It also start wandb 
+        devices. Optionally loads checkpoint if already present. It also start wandb
         logger if selected.
 
         This function gets executed at the beginning of every training.
@@ -397,11 +397,11 @@ def on_train_start(self):
             import wandb
 
             self.wlog = wandb.init(
-                project=self.hparams.project_name, 
+                project=self.hparams.project_name,
                 name=self.hparams.experiment_name,
                 resume=self.hparams.wandb_resume,
                 id=self.hparams.experiment_name,
-                config=self.hparams
+                config=self.hparams,
            )
 
         init_opt = self.configure_optimizers()
@@ -580,8 +580,8 @@ def train(
 
             train_metrics.update({"train_loss": loss_epoch / (idx + 1)})
 
-            if self.hparams.log_wandb: # wandb log train loss
-                self.wlog.log(train_metrics) 
+            if self.hparams.log_wandb:  # wandb log train loss
+                self.wlog.log(train_metrics)
 
             if "val" in datasets:
                 val_metrics = self.validate()
@@ -597,7 +597,7 @@ def train(
             else:
                 val_metrics = train_metrics.update({"val_loss": loss_epoch / (idx + 1)})
 
-            if self.hparams.log_wandb: # wandb log val loss
+            if self.hparams.log_wandb:  # wandb log val loss
                 self.wlog.log(val_metrics)
 
             if e >= 1 and self.debug:
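Note (not part of the patch; `git am` ignores text after the diff): the `wandb_resume` default reformatted above maps directly onto `wandb.init(resume=...)`. With `resume="auto"` and a stable `id`, wandb resumes a run that previously crashed and otherwise starts a fresh one. Below is a minimal standalone sketch of that call pattern — the project and experiment names are invented for illustration, and `mode="offline"` is only there so the snippet runs without a wandb login:

```python
import wandb

# Stand-in for micromind's self.hparams; only "wandb_resume" and "log_wandb"
# come from the patch above, the rest are hypothetical values.
hparams = {
    "project_name": "micromind-demo",  # hypothetical project name
    "experiment_name": "exp-001",      # hypothetical experiment name
    "log_wandb": True,
    "wandb_resume": "auto",            # ["allow", "must", "never", "auto" or None]
}

run = wandb.init(
    project=hparams["project_name"],
    name=hparams["experiment_name"],
    id=hparams["experiment_name"],     # a stable id is what lets "auto" find a crashed run
    resume=hparams["wandb_resume"],
    config=hparams,
    mode="offline",                    # drop this to sync to the wandb servers
)

run.log({"train_loss": 0.42})          # same call pattern as self.wlog.log(train_metrics)
run.finish()
```

Reusing `experiment_name` as both `name` and `id` (as `on_train_start` does) keeps resumption deterministic, at the cost of requiring unique experiment names per project.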