-
Notifications
You must be signed in to change notification settings - Fork 3
/
models.py
75 lines (66 loc) · 2.43 KB
/
models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import torch
from torch import nn
from typing import Optional, Tuple, List
class NeWRF(nn.Module):
    """Fully-connected NeRF-style MLP with an optional view-direction branch.

    The trunk is ``n_layers`` linear layers with ReLU activations and
    skip connections that re-concatenate the raw input at the layer
    indices listed in ``skip``.  When ``d_viewdirs`` is given, the head
    splits into an alpha output plus a view-conditioned branch producing
    two values (real/imag), concatenated to a 3-channel output
    ``[real, imag, alpha]``; otherwise a single linear head emits 3
    channels directly.
    """

    def __init__(
        self,
        d_input: int = 3,
        n_layers: int = 6,
        d_filter: int = 128,
        skip: Tuple[int, ...] = (4,),
        d_viewdirs: Optional[int] = None
    ):
        """
        Args:
            d_input: Dimensionality of the spatial input points.
            n_layers: Number of linear layers in the trunk.
            d_filter: Hidden width of the trunk layers.
            skip: Layer indices after which the raw input is
                re-concatenated to the activations (skip connections).
            d_viewdirs: Dimensionality of the view-direction input, or
                ``None`` to disable the view-conditioned branch.
        """
        super().__init__()
        self.d_input = d_input
        self.skip = skip
        self.act = nn.functional.relu
        self.d_viewdirs = d_viewdirs
        # NOTE(review): d_freq is never read inside this class — kept for
        # backward compatibility in case external code inspects it.
        self.d_freq = 0

        # Trunk: first layer maps the raw input up to d_filter; every
        # subsequent layer is d_filter -> d_filter, except layers that
        # directly follow a skip concatenation, which must also accept
        # the re-appended d_input features.
        self.layers = nn.ModuleList(
            [nn.Linear(self.d_input, d_filter)] +
            [nn.Linear(d_filter + self.d_input, d_filter) if i in skip
             else nn.Linear(d_filter, d_filter) for i in range(n_layers - 1)]
        )

        # Head: either a view-conditioned bottleneck or a plain linear output.
        if self.d_viewdirs is not None:
            self.alpha_out = nn.Linear(d_filter, 1)
            self.realimag_filters = nn.Linear(d_filter, d_filter)
            self.branch = nn.Linear(d_filter + self.d_viewdirs, d_filter // 2)
            self.output = nn.Linear(d_filter // 2, 2)
        else:
            # If no viewdirs, use simpler output
            self.output = nn.Linear(d_filter, 3)

    def forward(
        self,
        x: torch.Tensor,
        viewdirs: Optional[torch.Tensor] = None
    ) -> torch.Tensor:
        r"""Forward pass with optional view direction.

        Args:
            x: Input points of shape ``(..., d_input)``.
            viewdirs: View directions of shape ``(..., d_viewdirs)``.
                Required iff the model was built with ``d_viewdirs``.

        Returns:
            Tensor of shape ``(..., 3)``: ``[real, imag, alpha]`` when the
            view branch is enabled, otherwise the plain 3-channel output.

        Raises:
            ValueError: If ``viewdirs`` is supplied but the model was built
                without ``d_viewdirs``, or vice versa.
        """
        # Cannot use viewdirs if instantiated with d_viewdirs = None
        if self.d_viewdirs is None and viewdirs is not None:
            raise ValueError(
                'Cannot input x_direction if d_viewdirs was not given.')
        # Symmetric check: the bottleneck concatenates viewdirs, so a
        # missing tensor would otherwise fail with an opaque TypeError.
        if self.d_viewdirs is not None and viewdirs is None:
            raise ValueError(
                'viewdirs is required because d_viewdirs was given.')

        # Apply forward pass up to bottleneck
        x_input = x
        for i, layer in enumerate(self.layers):
            x = self.act(layer(x))
            if i in self.skip:
                x = torch.cat([x, x_input], dim=-1)

        # Apply bottleneck
        if self.d_viewdirs is not None:
            # Split alpha from network output
            alpha = self.alpha_out(x)
            # Pass through bottleneck conditioned on the view direction
            x = self.realimag_filters(x)
            x = torch.cat([x, viewdirs], dim=-1)
            x = self.act(self.branch(x))
            x = self.output(x)
            # Concatenate alphas to output
            x = torch.cat([x, alpha], dim=-1)
        else:
            # Simple output
            x = self.output(x)
        return x