"""
Remap normalized instances to global CRS.
"""
import glob
from pathlib import Path
import multiprocessing
import logging
import hydra
from omegaconf import DictConfig
from tqdm import tqdm
from utils import reverse_normalise_mesh, reverse_normalise_cloud, normalise_mesh
# Silence trimesh's chatty INFO/DEBUG output during batch processing;
# only warnings and errors from trimesh are shown.
logger = logging.getLogger("trimesh")
logger.setLevel(logging.WARNING)
@hydra.main(config_path='./conf', config_name='config', version_base='1.2')
def normalize_meshes(cfg: DictConfig):
    """
    Normalize meshes in `cfg.output_dir` against their reference meshes.

    Results are written to `<cfg.output_dir>/normalized`, keeping each
    input's base filename.

    Parameters
    ----------
    cfg: DictConfig
        Hydra configuration
    """
    args = []
    input_filenames = glob.glob(f'{cfg.output_dir}/*.obj')
    output_dir = Path(cfg.output_dir) / 'normalized'
    # parents=True so the call also succeeds when intermediate directories
    # of cfg.output_dir do not exist yet
    output_dir.mkdir(parents=True, exist_ok=True)
    for input_filename in input_filenames:
        base_filename = Path(input_filename).name
        reference_filename = (Path(cfg.reference_dir) / base_filename).with_suffix('.obj')
        output_filename = output_dir / base_filename
        # trailing flags presumably mean scale=True, translate=False (mirroring
        # the argument order used for reverse_normalise_mesh below) —
        # TODO confirm against utils.normalise_mesh
        args.append((input_filename, reference_filename, output_filename, 'scene', cfg.reconstruct.offset, True, False))
    print('start processing')
    with multiprocessing.Pool(processes=cfg.num_workers if cfg.num_workers else multiprocessing.cpu_count()) as pool:
        # call with multiprocessing; drain the iterator so tqdm shows progress
        for _ in tqdm(pool.imap_unordered(normalise_mesh, args), desc='Normalizing meshes', total=len(args)):
            pass
# normalize clouds as meshes
# NOTE(review): this alias reuses normalize_meshes' implementation, which
# globs '*.obj' in cfg.output_dir — confirm clouds are indeed expected as
# OBJ files when invoked through this name.
normalize_clouds = normalize_meshes
@hydra.main(config_path='./conf', config_name='config', version_base='1.2')
def remap_meshes(cfg: DictConfig):
    """
    Remap normalized buildings to global CRS.

    Results are written to `<cfg.output_dir>/global`, keeping each
    input's base filename.

    Parameters
    ----------
    cfg: DictConfig
        Hydra configuration
    """
    args = []
    # f-string for consistency with normalize_meshes / remap_clouds
    input_filenames = glob.glob(f'{cfg.output_dir}/*.obj')
    output_dir = Path(cfg.output_dir) / 'global'
    # parents=True so the call also succeeds when intermediate directories
    # of cfg.output_dir do not exist yet
    output_dir.mkdir(parents=True, exist_ok=True)
    for input_filename in input_filenames:
        base_filename = Path(input_filename).name
        # reference shares the input's name (inputs are already '*.obj')
        reference_filename = Path(cfg.reference_dir) / base_filename
        output_filename = output_dir / base_filename
        args.append((input_filename, reference_filename, output_filename, 'scene', cfg.reconstruct.offset, cfg.reconstruct.scale, cfg.reconstruct.translate))
    with multiprocessing.Pool(processes=cfg.num_workers if cfg.num_workers else multiprocessing.cpu_count()) as pool:
        # call with multiprocessing; drain the iterator so tqdm shows progress
        for _ in tqdm(pool.imap_unordered(reverse_normalise_mesh, args), desc='Remapping meshes', total=len(args)):
            pass
@hydra.main(config_path='./conf', config_name='config', version_base='1.2')
def remap_clouds(cfg: DictConfig):
    """
    Remap normalized point clouds to global CRS.

    Parameters
    ----------
    cfg: DictConfig
        Hydra configuration
    """
    target_dir = Path(cfg.output_dir) / 'global_clouds'
    target_dir.mkdir(exist_ok=True)
    reference_dir = Path(cfg.reference_dir)
    offset = cfg.reconstruct.offset
    # one task per input cloud: (input, reference mesh, output, mode, offset)
    tasks = []
    for cloud_path in glob.glob(f'{cfg.data_dir}/raw/test_cloud_normalised_ply/*.ply'):
        name = Path(cloud_path).name
        tasks.append((cloud_path, (reference_dir / name).with_suffix('.obj'), target_dir / name, 'scene', offset))
    print('start processing')
    workers = cfg.num_workers if cfg.num_workers else multiprocessing.cpu_count()
    with multiprocessing.Pool(processes=workers) as pool:
        # drain the iterator so tqdm tracks completion of each worker task
        for _ in tqdm(pool.imap_unordered(reverse_normalise_cloud, tasks), desc='Remapping clouds', total=len(tasks)):
            pass
if __name__ == '__main__':
    # Entry point runs only the mesh-remapping stage; swap in
    # normalize_meshes(), normalize_clouds() or remap_clouds() as needed.
    remap_meshes()