draft docs and examples for visualizing predictions (#102)
* visualize example tutorial

* fix links

* change gif

* add imports in howtos.md
YilingQiao authored Oct 14, 2020
1 parent 0375783 commit 8efd4d3
Showing 10 changed files with 295 additions and 70 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -192,6 +192,7 @@ the scripts in [`scripts/download_datasets`](https://github.com/intel-isl/Open3D

## How-tos

* [Visualize network predictions](docs/howtos.md#visualize-network-predictions)
* [Visualize custom data](docs/howtos.md#visualize-custom-data)
* [Adding a new model](docs/howtos.md#adding-a-new-model)
* [Adding a new dataset](docs/howtos.md#adding-a-new-dataset)
64 changes: 64 additions & 0 deletions docs/howtos.md
@@ -3,6 +3,70 @@
This page is an effort to give short examples for common tasks and will be
extended over time.

## Visualize network predictions
Users can inspect the prediction results using the visualizer. Run `python examples/vis_pred.py` to see an example.

First, initialize a `Visualizer` and set up a `LabelLUT` with the label names to visualize. Here we visualize points from `SemanticKITTI`, whose labels can be obtained with `get_label_to_names()`:
```python
from ml3d.vis import Visualizer, LabelLUT
from ml3d.datasets import SemanticKITTI

kitti_labels = SemanticKITTI.get_label_to_names()
v = Visualizer()
lut = LabelLUT()
for val in sorted(kitti_labels.keys()):
    lut.add_label(kitti_labels[val], val)
v.set_lut("labels", lut)
v.set_lut("pred", lut)
```
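Both the ground-truth `labels` and the prediction `pred` attributes are bound to the same `lut`, so matching classes are drawn in matching colors. If you want to pin a particular color to a class, `add_label` also appears to accept an optional color; a hedged sketch (assuming an RGB triple in [0, 1]):
```python
# sketch only: assumes LabelLUT.add_label takes an optional RGB color argument
lut.add_label("car", 1, color=[1.0, 0.0, 0.0])  # draw the 'car' class in red
```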

Second, construct the networks and pipelines, load the pretrained weights, and prepare the data to visualize:
```python
import os
from os.path import exists

from ml3d.torch.pipelines import SemanticSegmentation
from ml3d.torch.models import RandLANet, KPFCNN

kpconv_url = "https://storage.googleapis.com/open3d-releases/model-zoo/kpconv_semantickitti_202009090354utc.pth"
randlanet_url = "https://storage.googleapis.com/open3d-releases/model-zoo/randlanet_semantickitti_202009090354utc.pth"

ckpt_path = "./logs/vis_weights_{}.pth".format('RandLANet')
if not exists(ckpt_path):
    cmd = "wget {} -O {}".format(randlanet_url, ckpt_path)
    os.system(cmd)
model = RandLANet(ckpt_path=ckpt_path)
pipeline_r = SemanticSegmentation(model)
pipeline_r.load_ckpt(model.cfg.ckpt_path)

ckpt_path = "./logs/vis_weights_{}.pth".format('KPFCNN')
if not exists(ckpt_path):
    cmd = "wget {} -O {}".format(kpconv_url, ckpt_path)
    print(cmd)
    os.system(cmd)
model = KPFCNN(ckpt_path=ckpt_path, in_radius=10)
pipeline_k = SemanticSegmentation(model)
pipeline_k.load_ckpt(model.cfg.ckpt_path)

data_path = os.path.dirname(os.path.realpath(__file__)) + "/demo_data"
pc_names = ["000700", "000750"]

# see this function in examples/vis_pred.py;
# it can also be your own custom dataloader,
# or you can use the existing get_data() methods in ml3d/datasets
pcs = get_custom_data(pc_names, data_path)
```
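The download step above shells out to `wget`. If `wget` is not installed on your system, a minimal sketch that fetches the same checkpoint with only the Python standard library:
```python
# sketch: portable alternative to the wget call, standard library only
import urllib.request

if not exists(ckpt_path):
    urllib.request.urlretrieve(randlanet_url, ckpt_path)
```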

Third, run the inference, collect the results, and send them to `Visualizer.visualize(list_of_pointclouds_to_visualize)`. Note that the input to `visualize()` is a list of point clouds and their predictions, where each point cloud is a dictionary like:
```python
vis_d = {
    "name": name,
    "points": pts,       # n x 3
    "labels": label,     # n
    "pred": pred_label,  # n
}
```
You must provide `name` and `points`; the other entries can be customized. For example, here we attach the ground truth `labels` and the network prediction `pred` to each point cloud.
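Putting the pieces together: after building one such dictionary per point cloud (see `pred_custom_data` in `examples/vis_pred.py` below), the final call is simply:
```python
# run both pipelines on the demo clouds and open the visualizer
pcs_with_pred = pred_custom_data(pc_names, pcs, pipeline_r, pipeline_k)
v.visualize(pcs_with_pred)
```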

Here is the result of running `python examples/vis_pred.py`:
![Visualize prediction GIF](images/visualizer_predictions.gif)

## Visualize custom data

Binary file added docs/images/visualizer_predictions.gif
125 changes: 125 additions & 0 deletions examples/vis_pred.py
@@ -0,0 +1,125 @@
#!/usr/bin/env python
from ml3d.datasets import ParisLille3D
from ml3d.datasets import S3DIS
from ml3d.datasets import Semantic3D
from ml3d.datasets import SemanticKITTI
from ml3d.datasets import Toronto3D
from ml3d.vis import Visualizer, LabelLUT
from ml3d.utils import get_module

import argparse
import math
import numpy as np
import os
import random
import sys
import tensorflow as tf
import torch
from os.path import exists, join, isfile, dirname, abspath, split


def get_custom_data(pc_names, path):
    # load point coordinates and per-point labels from .npy files
    pc_data = []
    for i, name in enumerate(pc_names):
        pc_path = join(path, 'points', name + '.npy')
        label_path = join(path, 'labels', name + '.npy')
        point = np.load(pc_path)[:, 0:3]
        label = np.squeeze(np.load(label_path))

        data = {
            'point': point,
            'feat': None,
            'label': label,
        }
        pc_data.append(data)

    return pc_data


def pred_custom_data(pc_names, pcs, pipeline_r, pipeline_k):
    vis_points = []
    for i, data in enumerate(pcs):
        name = pc_names[i]

        # RandLA-Net inference; shift ids by 1 so they line up with the
        # dataset labels (0 is 'unlabeled' in SemanticKITTI)
        results_r = pipeline_r.run_inference(data)
        pred_label_r = (results_r['predict_labels'] + 1).astype(np.int32)
        # zeroing the first point appears to be a workaround that keeps the
        # visualizer's color mapping consistent across point clouds
        pred_label_r[0] = 0

        # KPFCNN (KPConv) inference, treated the same way
        results_k = pipeline_k.run_inference(data)
        pred_label_k = (results_k['predict_labels'] + 1).astype(np.int32)
        pred_label_k[0] = 0

        label = data['label']
        pts = data['point']

        # one entry with ground truth and a prediction as attributes
        vis_d = {
            "name": name,
            "points": pts,
            "labels": label,
            "pred": pred_label_k,
        }
        vis_points.append(vis_d)

        # the same cloud again, colored by each network's prediction
        vis_d = {
            "name": name + "_randlanet",
            "points": pts,
            "labels": pred_label_r,
        }
        vis_points.append(vis_d)

        vis_d = {
            "name": name + "_kpconv",
            "points": pts,
            "labels": pred_label_k,
        }
        vis_points.append(vis_d)

    return vis_points


# ------------------------------

from ml3d.torch.pipelines import SemanticSegmentation
from ml3d.torch.models import RandLANet, KPFCNN


def main():
    kitti_labels = SemanticKITTI.get_label_to_names()
    v = Visualizer()
    lut = LabelLUT()
    for val in sorted(kitti_labels.keys()):
        lut.add_label(kitti_labels[val], val)
    v.set_lut("labels", lut)
    v.set_lut("pred", lut)

    kpconv_url = "https://storage.googleapis.com/open3d-releases/model-zoo/kpconv_semantickitti_202009090354utc.pth"
    randlanet_url = "https://storage.googleapis.com/open3d-releases/model-zoo/randlanet_semantickitti_202009090354utc.pth"

    # download the pretrained weights if they are not cached locally
    ckpt_path = "./logs/vis_weights_{}.pth".format('RandLANet')
    if not exists(ckpt_path):
        cmd = "wget {} -O {}".format(randlanet_url, ckpt_path)
        os.system(cmd)
    model = RandLANet(ckpt_path=ckpt_path)
    pipeline_r = SemanticSegmentation(model)
    pipeline_r.load_ckpt(model.cfg.ckpt_path)

    ckpt_path = "./logs/vis_weights_{}.pth".format('KPFCNN')
    if not exists(ckpt_path):
        cmd = "wget {} -O {}".format(kpconv_url, ckpt_path)
        print(cmd)
        os.system(cmd)
    model = KPFCNN(ckpt_path=ckpt_path, in_radius=10)
    pipeline_k = SemanticSegmentation(model)
    pipeline_k.load_ckpt(model.cfg.ckpt_path)

    data_path = os.path.dirname(os.path.realpath(__file__)) + "/demo_data"
    pc_names = ["000700", "000750"]
    pcs = get_custom_data(pc_names, data_path)
    pcs_with_pred = pred_custom_data(pc_names, pcs, pipeline_r, pipeline_k)

    v.visualize(pcs_with_pred)


if __name__ == "__main__":
    main()
10 changes: 10 additions & 0 deletions ml3d/datasets/base_dataset.py
@@ -31,6 +31,16 @@ def __init__(self, **kwargs):
        self.cfg = Config(kwargs)
        self.name = self.cfg.name

    @staticmethod
    @abstractmethod
    def get_label_to_names():
        """Returns a label to names dict.

        Returns:
            A dict where keys are label numbers and
            values are the corresponding names.
        """

    @abstractmethod
    def get_split(self, split):
        """Returns a dataset split.
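For a concrete sense of the new contract, a hypothetical subclass (names and labels invented purely for illustration) would implement the static method the same way the real datasets below do:
```python
class MyLidarDataset(BaseDataset):

    @staticmethod
    def get_label_to_names():
        # hypothetical mapping, for illustration only
        return {0: 'unlabeled', 1: 'ground', 2: 'building'}
```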
29 changes: 17 additions & 12 deletions ml3d/datasets/parislille3d.py
@@ -58,18 +58,7 @@ def __init__(self,

        cfg = self.cfg

        self.label_to_names = {
            0: 'unclassified',
            1: 'ground',
            2: 'building',
            3: 'pole-road_sign-traffic_light',
            4: 'bollard-small_pole',
            5: 'trash_can',
            6: 'barrier',
            7: 'pedestrian',
            8: 'car',
            9: 'natural-vegetation'
        }
        self.label_to_names = self.get_label_to_names()

        self.num_classes = len(self.label_to_names)
        self.label_values = np.sort([k for k, v in self.label_to_names.items()])
@@ -88,6 +77,22 @@ def __init__(self,
        test_path = cfg.dataset_path + "/test_10_classes/"
        self.test_files = glob.glob(test_path + '*.ply')

    @staticmethod
    def get_label_to_names():
        label_to_names = {
            0: 'unclassified',
            1: 'ground',
            2: 'building',
            3: 'pole-road_sign-traffic_light',
            4: 'bollard-small_pole',
            5: 'trash_can',
            6: 'barrier',
            7: 'pedestrian',
            8: 'car',
            9: 'natural-vegetation'
        }
        return label_to_names

    def get_split(self, split):
        return ParisLille3DSplit(self, split=split)

35 changes: 20 additions & 15 deletions ml3d/datasets/s3dis.py
@@ -61,21 +61,7 @@ def __init__(self,

        cfg = self.cfg

        self.label_to_names = {
            0: 'ceiling',
            1: 'floor',
            2: 'wall',
            3: 'beam',
            4: 'column',
            5: 'window',
            6: 'door',
            7: 'table',
            8: 'chair',
            9: 'sofa',
            10: 'bookcase',
            11: 'board',
            12: 'clutter'
        }
        self.label_to_names = self.get_label_to_names()
        self.num_classes = len(self.label_to_names)
        self.label_values = np.sort([k for k, v in self.label_to_names.items()])
        self.label_to_idx = {l: i for i, l in enumerate(self.label_values)}
@@ -94,6 +80,25 @@ def __init__(self,
        self.all_files = glob.glob(
            str(Path(self.cfg.dataset_path) / 'original_ply' / '*.ply'))

    @staticmethod
    def get_label_to_names():
        label_to_names = {
            0: 'ceiling',
            1: 'floor',
            2: 'wall',
            3: 'beam',
            4: 'column',
            5: 'window',
            6: 'door',
            7: 'table',
            8: 'chair',
            9: 'sofa',
            10: 'bookcase',
            11: 'board',
            12: 'clutter'
        }
        return label_to_names

    def get_split(self, split):
        return S3DISSplit(self, split=split)

27 changes: 16 additions & 11 deletions ml3d/datasets/semantic3d.py
@@ -64,17 +64,7 @@ def __init__(self,

        cfg = self.cfg

        self.label_to_names = {
            0: 'unlabeled',
            1: 'man-made terrain',
            2: 'natural terrain',
            3: 'high vegetation',
            4: 'low vegetation',
            5: 'buildings',
            6: 'hard scape',
            7: 'scanning artefacts',
            8: 'cars'
        }
        self.label_to_names = self.get_label_to_names()
        self.num_classes = len(self.label_to_names)
        self.label_values = np.sort([k for k, v in self.label_to_names.items()])
        self.label_to_idx = {l: i for i, l in enumerate(self.label_values)}
@@ -103,6 +93,21 @@
        self.train_files = np.sort(
            [f for f in self.train_files if f not in self.val_files])

    @staticmethod
    def get_label_to_names():
        label_to_names = {
            0: 'unlabeled',
            1: 'man-made terrain',
            2: 'natural terrain',
            3: 'high vegetation',
            4: 'low vegetation',
            5: 'buildings',
            6: 'hard scape',
            7: 'scanning artefacts',
            8: 'cars'
        }
        return label_to_names

    def get_split(self, split):
        return Semantic3DSplit(self, split=split)
