# infer.py — forked from Fried-Rice-Lab/FriedRiceLab
# --------------------------------------------------------------------------------
# Infer your own images on a specified model on a specified task.
#
# Implemented by Jinpeng Shi (https://github.com/jinpeng-s)
# --------------------------------------------------------------------------------
import logging
from os import path as osp
import torch
from basicsr.data import build_dataloader, build_dataset
from basicsr.models import build_model
from basicsr.utils import get_env_info, get_root_logger, get_time_str
from basicsr.utils.options import dict2str
import archs # noqa
import data # noqa
import models # noqa
from utils import parse_options, make_exp_dirs
def infer_pipeline(root_path):
    """Run inference on user-supplied images with the model/task named in the options.

    Args:
        root_path: Absolute path of the project root; forwarded to
            ``parse_options`` so paths in the option file resolve correctly.
    """
    # Parse the option file; this also configures the distributed setting
    # and the random seed.
    opt, _ = parse_options(root_path, is_train=False)
    torch.backends.cudnn.benchmark = True
    # torch.backends.cudnn.deterministic = True

    # Create the experiment directories and a per-run log file.
    make_exp_dirs(opt)
    log_file = osp.join(opt['path']['log'], f"infer_{opt['name']}_{get_time_str()}.log")
    logger = get_root_logger(logger_name='basicsr', log_level=logging.INFO, log_file=log_file)
    logger.info(get_env_info())
    logger.info(dict2str(opt))

    # Build one dataset + dataloader per configured inference dataset.
    loaders = []
    for _, ds_opt in sorted(opt['infer_datasets'].items()):
        # NOTE(review): the LQ root is aliased to the GT root (marked "fix it"
        # in the original) — presumably because inference has no ground truth;
        # confirm against the dataset implementation.
        ds_opt['dataroot_lq'] = ds_opt['dataroot_gt']
        ds_opt['phase'] = 'val'
        ds_opt['bit'] = opt['bit']
        ds_opt['scale'] = opt['scale']
        dataset = build_dataset(ds_opt)
        loader = build_dataloader(
            dataset, ds_opt, num_gpu=opt['num_gpu'], dist=opt['dist'],
            sampler=None, seed=opt['manual_seed'])
        logger.info(f"Number of infer images in {ds_opt['name']}: {len(dataset)}")
        loaders.append(loader)

    # Instantiate the model and run non-distributed inference on each dataset.
    model = build_model(opt)
    for loader in loaders:
        infer_set_name = loader.dataset.opt['name']
        logger.info(f'Inferring {infer_set_name}...')
        model.nondist_inference(loader)

    logger.info(
        f"Inference ended. The results are saved to "
        f"{osp.join('results', opt['name'], 'visualization', 'inference')}.")
if __name__ == '__main__':
    # Project root is the directory that contains this script.
    infer_pipeline(osp.abspath(osp.join(__file__, osp.pardir)))