benchmark.py
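
"""Benchmark entry point: trains baseline models and causal GNN variants on the
configured MIMIC dataset, and runs ablations over GNN architecture, feature
dropout, hidden dimension, and regularization coefficient."""
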
import yaml
from utils import ordered_yaml
import pickle
import random
import torch
import wandb
from trainers import (
    GNNTrainer,
    CausalGNNTrainer,
    BaselinesTrainer
)
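

# Baseline sweep: load the pickled MIMIC base dataset, then train each selected
# baseline model on each selected task with BaselinesTrainer.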
def benchmark_baselines(config):
    # Load the pickled base dataset used by the baseline trainers
    with open(config["datasets"]["dataset_path"], 'rb') as inp:
        unp = pickle.Unpickler(inp)
        mimic3base = unp.load()

    for method in [
        # "DrAgent",
        # "StageNet",
        # "AdaCare",
        # "Transformer",
        # "RNN",
        # "ConCare",
        # "GRSAP",
        # "Deepr",
        # "MICRON",
        # "GAMENet",
        "MoleRec",
        # "SafeDrug",
        # "SparcNet",
    ]:
        for task in [
            # "readm",
            # "mort_pred",
            # "los",
            "drug_rec"
        ]:
            config["train"]["baseline_name"] = method
            config["train"]["task"] = task
            dataset_name = config["datasets"]["name"]
            config["checkpoint"]["path"] = f"./checkpoints/{method}/{dataset_name}/{task}/"
            print(f"Training {method} on task {task}")
            trainer = BaselinesTrainer(config, mimic3base)
            trainer.train()
            del trainer
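

# Architecture ablation: train the causal GNN with each selected backbone,
# using the per-architecture settings from the GNN config file.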
def benchmark_gnns(config):
    # Load GNN configs
    with open("./configs/GNN/GNN_MIMIC4_Configs.yml", mode='r') as f:
        loader, _ = ordered_yaml()
        gnn_config = yaml.load(f, loader)

    for archi in [
        # "GCN",
        # "GAT",
        "GIN",
        "HetRGCN",
        # "HGT"
    ]:
        config["GNN"] = gnn_config[archi]
        dataset_name = config["datasets"]["name"]
        config["name"] = f"{archi}_MTCausal_MIMIC{dataset_name[-1]}_RMDL"
        config["checkpoint"]["path"] = f"./checkpoints/GNN_ablation/{dataset_name}/{archi}/"
        config["logging"]["tags"] += [archi]
        trainer = CausalGNNTrainer(config)
        trainer.train()
        wandb.finish()
        del trainer
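

# Dropout ablation: sweep the feature-dropout rate of the causal GNN.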
def benchmark_dropouts(config):
    for dp in [
        0.1, 0.2, 0.3, 0.4, 0.5, 0.6
    ]:
        config["GNN"]["feat_drop"] = dp
        config["name"] = f"HGT_MTCausal_MIMIC3_RMDL_dp{dp}"
        dataset_name = config["datasets"]["name"]
        config["checkpoint"]["path"] = f"./checkpoints/Dropout_ablation/{dataset_name}/{dp}/"
        config["logging"]["tags"] += ["abl_dropout"]
        trainer = CausalGNNTrainer(config)
        trainer.train()
        wandb.finish()
        del trainer
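

# Hidden-dimension ablation: sweep the GNN hidden size.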
def benchmark_hidden_dim(config):
    for dim in [
        16, 32, 64, 128, 256
    ]:
        config["GNN"]["hidden_dim"] = dim
        config["name"] = f"HGT_MTCausal_MIMIC3_RMDL_dim{dim}"
        dataset_name = config["datasets"]["name"]
        config["checkpoint"]["path"] = f"./checkpoints/Hidden_Dim_ablation/{dataset_name}/{dim}/"
        config["logging"]["tags"] += ["abl_dim"]
        trainer = CausalGNNTrainer(config)
        trainer.train()
        wandb.finish()
        del trainer
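

# Regularization ablation: sweep the regularization coefficient used in training.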
def benchmark_reg(config):
    for reg in [
        0.00005, 0.0001, 0.0005, 0.001, 0.01, 0.1, 1
    ]:
        config["train"]["reg"] = reg
        config["name"] = f"HGT_MTCausal_MIMIC3_RMDL_reg{reg}"
        dataset_name = config["datasets"]["name"]
        config["checkpoint"]["path"] = f"./checkpoints/Reg_Coeff_ablation/{dataset_name}/{reg}/"
        config["logging"]["tags"] += ["abl_reg"]
        trainer = CausalGNNTrainer(config)
        trainer.train()
        wandb.finish()
        del trainer


# Set seed
seed = 611
random.seed(seed)
torch.manual_seed(seed)

# config_file = "Baselines_MIMIC4.yml"
config_file = "HGT_Causal_MIMIC3.yml"
config_path = f"./configs/{config_file}"

with open(config_path, mode='r') as f:
    loader, _ = ordered_yaml()
    config = yaml.load(f, loader)
print(f"Loaded configs from {config_path}")
if __name__ == "__main__":
    # benchmark_baselines(config)
    # benchmark_gnns(config)
    # benchmark_dropouts(config)
    benchmark_hidden_dim(config)
    # benchmark_reg(config)