-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlocal_mse.py
138 lines (101 loc) · 3.33 KB
/
local_mse.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
import warnings
import pandas as pd
import re, os, math
import numpy as np
import pickle
import torch
from torch import nn
from torchvision import datasets, transforms, models
from lib.data.dataset import InpaintingDataset
from lib.models import networks
import matplotlib.pyplot as plt
import skimage
from skimage.color import grey2rgb
import re
from operator import itemgetter
class LocalLoss(nn.Module):
    """Micro-averaged loss restricted to the masked region of an image.

    Wraps any elementwise criterion class (e.g. ``nn.MSELoss``) and averages
    it over only the pixels where ``mask`` is non-zero.
    """

    def __init__(self, baseloss, eps=1e-8):
        """
        Args:
            baseloss: a loss *class* such as ``nn.MSELoss`` (not an
                instance); instantiated here with ``reduction='none'``.
            eps: small constant added to the affected-pixel count so an
                all-zero mask cannot cause a division by zero.
        """
        super().__init__()
        self.loss = baseloss(reduction='none')
        # BUG FIX: `eps` was referenced but never defined (NameError on
        # construction); it is now a keyword argument with a default.
        self.eps = eps

    def forward(self, yhat, y, mask):
        """Return the loss averaged over the masked (non-zero) pixels only."""
        # Zero out the unmasked region so it contributes nothing to the sum.
        loss = self.loss(y * mask, yhat * mask).sum()
        affected_pixels = (mask != 0).float().sum()
        # Micro-average on affected pixels; `eps` guards an empty mask.
        return loss / (affected_pixels + self.eps)
exp = 'wgan_rmse'
exp_root = f'/home/s2125048/thesis/model/{exp}/'

### Collect every checkpoint (*.pt) under the experiment root, tagging each
### with the epoch number parsed from its filename (e.g. "...epoch12...").
model_paths = []
for root, dirs, files in os.walk(exp_root):
    for f in files:
        # BUG FIX: `'.pt' in f` also matched unrelated names (e.g. ".pth",
        # backups); match the extension explicitly instead.
        if f.endswith('.pt'):
            # BUG FIX: raw string for the regex ('\d' in a plain string is
            # an invalid escape on modern Python); a capture group replaces
            # the group(0).split('epoch') round-trip.
            match = re.search(r'epoch(\d+)', f)
            model_paths.append({
                'path': os.path.join(root, f),
                'ep': int(match.group(1)),
            })

### Checkpoint paths sorted by epoch, ascending.
new = sorted(model_paths, key=itemgetter('ep'))
new_paths = [p['path'] for p in new]
image_target_size = 128
dataset_path = "/home/s2125048/thesis/dataset/"

### construct training dataset
train_df = pd.read_csv(os.path.join(dataset_path, 'csv/train_all_masks.csv'))
# BUG FIX: the class is imported directly (`from lib.data.dataset import
# InpaintingDataset`); `dataset.InpaintingDataset` raised a NameError.
train_dataset = InpaintingDataset(
    dataset_path,
    dataframe=train_df,
    transform=transforms.Compose([
        transforms.Resize(image_target_size,),
        transforms.ToTensor()]))
train_loader = torch.utils.data.DataLoader(
    train_dataset,
    batch_size=64,
    num_workers=0,
    shuffle=True,   # shuffle training batches
)

### construct test dataset
test_df = pd.read_csv(os.path.join(dataset_path, 'csv/test_all_masks.csv'))
test_dataset = InpaintingDataset(
    dataset_path,
    dataframe=test_df,
    transform=transforms.Compose([
        transforms.Resize(image_target_size,),
        transforms.ToTensor()]))
test_loader = torch.utils.data.DataLoader(
    test_dataset,
    batch_size=64,
    num_workers=0,
    shuffle=False,  # keep evaluation order deterministic
)
### Per-epoch local (masked-region) MSE, keyed by epoch, per split.
metric = {
    'train': {},
    'test': {},
}

mse_local_criterion = LocalLoss(nn.MSELoss)

for p in new_paths:
    # Load this checkpoint's generator weights on CPU.
    G_statedict = torch.load(p, map_location='cpu')
    # BUG FIX: `get_network` is not imported as a bare name; it lives in
    # the `networks` module imported at the top of the file.
    net_G = networks.get_network('generator', 'unet').cpu()
    net_G.load_state_dict(G_statedict)
    net_G.eval()  # evaluation mode (fixes dropout / batch-norm behaviour)

    # BUG FIX: the epoch was parsed from an undefined `curr_m_paths`;
    # parse it from the current checkpoint path (raw-string regex), and
    # print the epoch number rather than the raw path.
    ep = int(re.search(r'epoch(\d+)', p).group(1))
    print('epoch', ep)

    train_loss = 0.0
    test_loss = 0.0

    # No gradients needed for evaluation; this also stops the accumulated
    # losses from dragging autograd graphs along (memory leak in the
    # original, which summed grad-attached tensors).
    with torch.no_grad():
        print("train")
        b = 0
        for ground, mask, _ in train_loader:
            b += 1
            mask = torch.ceil(mask)       # binarize: any coverage -> 1
            masked = ground * (1 - mask)  # blank out the region to inpaint
            out = net_G(masked)
            train_loss += mse_local_criterion(out, ground, mask).item()
        train_loss = train_loss / b       # mean over batches

        print("test")
        b = 0
        for ground, mask, _ in test_loader:
            b += 1
            mask = torch.ceil(mask)
            masked = ground * (1 - mask)
            out = net_G(masked)
            test_loss += mse_local_criterion(out, ground, mask).item()
        test_loss = test_loss / b

    metric['train'][ep] = train_loss
    metric['test'][ep] = test_loss

    # Persist after every checkpoint so partial results survive interruption.
    with open(os.path.join(exp_root, f'local_mse_{exp}.obj'), 'wb') as handle:
        pickle.dump(metric, handle, protocol=pickle.HIGHEST_PROTOCOL)