# NOTE: the text originally here was scraping residue from a GitHub file-view
# page (the "repository archived Apr 16, 2023" banner, fork/notification
# chrome, the "101 lines (80 loc) - 3.01 KB" size line, and a column of
# rendered gutter line numbers 1-101). None of it is part of the program.
# Original file name: 04_01-mean.py
from ops import *
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from utils.dataloader import PatchesGen
from model.losses import WBCE
import time
import tensorflow as tf
import os
import json
import importlib
from multiprocessing import Pool
from multiprocessing import Process
from itertools import repeat
import sys
import logging
def mean_model(times, exp, img_type, test_cond, method):
    """Average the per-run probability maps of one experiment.

    Loads ``times`` saved probability maps (``prob_0.npy`` .. ``prob_{times-1}.npy``)
    from the experiment's ``pred_maps`` directory, writes their pixel-wise mean
    to ``prob_mean.npy``, and deletes the individual per-run maps.

    Parameters
    ----------
    times : int
        Number of repeated training runs whose maps are averaged.
    exp : int
        Experiment number; selects the ``imgs/experiments/exp_{exp}`` directory.
    img_type, test_cond, method : str
        Experiment descriptors; used only for the progress printout here.

    Side effects: reconfigures the root logger and redirects stdout/stderr
    into ``mean.log``; creates the experiment directories if missing; removes
    the per-run ``prob_*.npy`` files on success.
    """
    # Route all prints/tracebacks of this (sub)process into mean.log.
    logging.basicConfig(
        level=logging.DEBUG,
        format='%(asctime)s:%(levelname)s:%(name)s:%(message)s',
        filename='mean.log',
        filemode='a'
    )
    log = logging.getLogger('foobar')
    sys.stdout = StreamToLogger(log, logging.INFO)
    sys.stderr = StreamToLogger(log, logging.ERROR)
    tf.get_logger().setLevel('ERROR')

    print(f'Mean Prediction Experiment {exp}')
    print(f'Conditions: {method}_{img_type}_{test_cond}')

    img_path = 'imgs'
    path_exp = os.path.join(img_path, 'experiments', f'exp_{exp}')
    path_models = os.path.join(path_exp, 'models')
    path_maps = os.path.join(path_exp, 'pred_maps')
    # exist_ok avoids the check-then-create race of the old
    # `if not os.path.exists: os.makedirs` idiom; parents are created too.
    os.makedirs(path_models, exist_ok=True)
    os.makedirs(path_maps, exist_ok=True)

    # Load each run's map exactly once (the old code loaded prob_0.npy twice:
    # once for its shape and again inside the loop) and stack along a new
    # trailing axis -> shape (H, W, times).
    prob_rec = np.stack(
        [np.load(os.path.join(path_maps, f'prob_{tm}.npy')).astype(np.float32)
         for tm in range(times)],
        axis=-1,
    )
    mean_prob = np.mean(prob_rec, axis=-1)
    np.save(os.path.join(path_maps, 'prob_mean.npy'), mean_prob)

    # The per-run maps are no longer needed once the mean is persisted.
    for tm in range(times):
        os.remove(os.path.join(path_maps, f'prob_{tm}.npy'))
if __name__ == '__main__':
    # Read the experiment roster once in the parent process.
    with open('experiments.json') as param_file:
        params = json.load(param_file)
    times = params['times']

    # One child process per experiment, executed strictly one after another:
    # join() immediately after start() serializes the runs.  A fresh process
    # per experiment also guarantees TensorFlow/logging state from one run
    # cannot leak into the next.
    for exp in params['experiments']:
        p = Process(
            target=mean_model,
            args=(times, exp['num'], exp['img_type'], exp['test_cond'], exp['method']),
        )
        p.start()
        p.join()