From e30675629706a5eedecc098cc178c101e458fcda Mon Sep 17 00:00:00 2001
From: Bora Uyar
Date: Sat, 6 Jul 2024 13:19:19 +0200
Subject: [PATCH] updates to trainer and monitoring

---
 flexynesis/__main__.py | 2 +-
 flexynesis/main.py     | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/flexynesis/__main__.py b/flexynesis/__main__.py
index e61a9f8..c1f938c 100644
--- a/flexynesis/__main__.py
+++ b/flexynesis/__main__.py
@@ -231,7 +231,7 @@ class AvailableModels(NamedTuple):
                                        string_organism=args.string_organism,
                                        string_node_name=args.string_node_name,
                                        downsample = args.subsample)
-    train_dataset, test_dataset = data_importer.import_data(force = True)
+    train_dataset, test_dataset = data_importer.import_data(force = False)
 
     if args.model_class == 'GNNEarly':
         # overlay datasets with network info
diff --git a/flexynesis/main.py b/flexynesis/main.py
index 3976dc2..b7f1f1f 100644
--- a/flexynesis/main.py
+++ b/flexynesis/main.py
@@ -1,4 +1,6 @@
 from lightning import seed_everything
+seed_everything(42, workers=True)
+
 import torch
 from torch.utils.data import DataLoader, random_split
 import torch_geometric
@@ -151,8 +153,11 @@ def setup_trainer(self, params, current_step, total_steps, full_train = False):
             mycallbacks.append(early_stop_callback)
 
         trainer = pl.Trainer(
+            #deterministic = True,
            precision = '16-mixed', # mixed precision training
            max_epochs=int(params['epochs']),
+            gradient_clip_val=1.0,
+            gradient_clip_algorithm='norm',
            log_every_n_steps=5,
            callbacks=mycallbacks,
            default_root_dir="./",
@@ -274,6 +279,9 @@ def perform_tuning(self, hpo_patience = 0):
             if no_improvement_count >= hpo_patience & hpo_patience > 0:
                 print(f"No improvement in best loss for {hpo_patience} iterations, stopping hyperparameter optimisation early.")
                 break # Break out of the loop
+            best_params_dict = {param.name: value for param, value in zip(self.space, best_params)} if best_params else None
+            print(f"[INFO] current best val loss: {best_loss}; best params: {best_params_dict}; no improvement for {no_improvement_count} HPO iterations")
+
         # Convert best parameters from list to dictionary and include epochs
         best_params_dict = {param.name: value for param, value in zip(self.space, best_params)}
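
Note (not part of the patch): below is a minimal, self-contained sketch of how the
new settings fit together, i.e. module-level seeding plus norm-based gradient
clipping in the Trainer. It assumes lightning >= 2.0; TinyModel and the random
dataloader are hypothetical stand-ins, not flexynesis code.

    import torch
    from torch.utils.data import DataLoader, TensorDataset
    import lightning as pl
    from lightning import seed_everything

    # Seed Python, NumPy, and torch RNGs (including DataLoader workers) up
    # front, mirroring the module-level call added in flexynesis/main.py.
    seed_everything(42, workers=True)

    class TinyModel(pl.LightningModule):
        def __init__(self):
            super().__init__()
            self.layer = torch.nn.Linear(8, 1)

        def training_step(self, batch, batch_idx):
            x, y = batch
            return torch.nn.functional.mse_loss(self.layer(x), y)

        def configure_optimizers(self):
            return torch.optim.Adam(self.parameters(), lr=1e-3)

    train_loader = DataLoader(
        TensorDataset(torch.randn(64, 8), torch.randn(64, 1)), batch_size=16
    )

    trainer = pl.Trainer(
        # mixed precision needs a GPU; fall back to full precision on CPU
        precision='16-mixed' if torch.cuda.is_available() else '32-true',
        max_epochs=2,
        gradient_clip_val=1.0,           # new in this patch: clip gradients...
        gradient_clip_algorithm='norm',  # ...by their global L2 norm
        log_every_n_steps=5,
        default_root_dir="./",
    )
    trainer.fit(TinyModel(), train_loader)

Clipping by global norm (rather than by value) rescales the whole gradient
vector when it exceeds 1.0, which preserves gradient direction and tends to
pair well with 16-bit mixed precision, where occasional large gradients are
more likely.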
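The perform_tuning hunk builds a name-to-value dict from the search space purely
for logging. The `param.name` access suggests `self.space` is a list of
scikit-optimize dimensions; here is a small illustrative sketch under that
assumption (the space, values, and counters below are made up, not taken from
flexynesis):

    from skopt.space import Integer, Real

    space = [
        Integer(16, 256, name='hidden_dim'),
        Real(1e-4, 1e-1, prior='log-uniform', name='lr'),
    ]
    best_params = [128, 0.001]  # ordered like `space`, as the optimizer returns them
    best_loss = 0.42
    no_improvement_count = 3

    # Guarded with `if best_params else None` so the log line is safe before
    # the first HPO iteration has produced any result.
    best_params_dict = (
        {param.name: value for param, value in zip(space, best_params)}
        if best_params else None
    )
    print(f"[INFO] current best val loss: {best_loss}; best params: {best_params_dict}; "
          f"no improvement for {no_improvement_count} HPO iterations")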