-- SpatialConvolution.lua (forked from torch/nn)
local SpatialConvolution, parent = torch.class('nn.SpatialConvolution', 'nn.Module')

-- nInputPlane/nOutputPlane: number of input/output feature maps.
-- kW, kH: kernel width and height.
-- dW, dH: horizontal and vertical stride (default 1).
-- padding: implicit zero padding on both spatial dimensions (default 0).
function SpatialConvolution:__init(nInputPlane, nOutputPlane, kW, kH, dW, dH, padding)
   parent.__init(self)

   dW = dW or 1
   dH = dH or 1

   self.nInputPlane = nInputPlane
   self.nOutputPlane = nOutputPlane
   self.kW = kW
   self.kH = kH
   self.dW = dW
   self.dH = dH
   self.padding = padding or 0

   self.weight = torch.Tensor(nOutputPlane, nInputPlane, kH, kW)
   self.bias = torch.Tensor(nOutputPlane)
   self.gradWeight = torch.Tensor(nOutputPlane, nInputPlane, kH, kW)
   self.gradBias = torch.Tensor(nOutputPlane)

   self:reset()
end

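-- The output spatial size implied by this layer follows the usual convolution
-- arithmetic. A minimal illustrative sketch (this helper is an assumption for
-- exposition, not part of the original module):
local function convOutputSize(inputSize, kernelSize, stride, padding)
   -- e.g. inputSize=32, kernelSize=5, stride=1, padding=0  ->  28
   return math.floor((inputSize + 2*padding - kernelSize) / stride) + 1
end
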
-- Reinitializes weight and bias from a uniform distribution. With no stdv
-- given, the bound defaults to 1/sqrt(fan-in), where fan-in = kW*kH*nInputPlane.
function SpatialConvolution:reset(stdv)
   if stdv then
      stdv = stdv * math.sqrt(3)
   else
      stdv = 1/math.sqrt(self.kW*self.kH*self.nInputPlane)
   end
   if nn.oldSeed then
      self.weight:apply(function()
         return torch.uniform(-stdv, stdv)
      end)
      self.bias:apply(function()
         return torch.uniform(-stdv, stdv)
      end)
   else
      self.weight:uniform(-stdv, stdv)
      self.bias:uniform(-stdv, stdv)
   end
end

-- Upgrades modules serialized with an older layout: ensures the finput/fgradInput
-- buffers and the padding field exist, and reshapes legacy 2D weights to 4D.
local function backCompatibility(self)
   self.finput = self.finput or self.weight.new()
   self.fgradInput = self.fgradInput or self.weight.new()
   self.padding = self.padding or 0
   if self.weight:dim() == 2 then
      self.weight = self.weight:view(self.nOutputPlane, self.nInputPlane, self.kH, self.kW)
   end
   if self.gradWeight and self.gradWeight:dim() == 2 then
      self.gradWeight = self.gradWeight:view(self.nOutputPlane, self.nInputPlane, self.kH, self.kW)
   end
end

-- The MM backend requires contiguous tensors; copy input/gradOutput into
-- cached buffers when they are not.
local function makeContiguous(self, input, gradOutput)
   if not input:isContiguous() then
      self._input = self._input or input.new()
      self._input:resizeAs(input):copy(input)
      input = self._input
   end
   if gradOutput and not gradOutput:isContiguous() then
      self._gradOutput = self._gradOutput or gradOutput.new()
      self._gradOutput:resizeAs(gradOutput):copy(gradOutput)
      gradOutput = self._gradOutput
   end
   return input, gradOutput
end

-- Re-view the 4D weight as a 2D matrix (nOutputPlane x nInputPlane*kH*kW) so
-- the MM-based kernels can treat the convolution as a matrix multiplication.
-- For example, a (16 x 3 x 5 x 5) weight is viewed as (16 x 75).
local function viewWeight(self)
   self.weight = self.weight:view(self.nOutputPlane, self.nInputPlane * self.kH * self.kW)
   if self.gradWeight and self.gradWeight:dim() > 0 then
      self.gradWeight = self.gradWeight:view(self.nOutputPlane, self.nInputPlane * self.kH * self.kW)
   end
end

-- Restore the 4D weight layout after the MM ops have run.
local function unviewWeight(self)
   self.weight = self.weight:view(self.nOutputPlane, self.nInputPlane, self.kH, self.kW)
   if self.gradWeight and self.gradWeight:dim() > 0 then
      self.gradWeight = self.gradWeight:view(self.nOutputPlane, self.nInputPlane, self.kH, self.kW)
   end
end

function SpatialConvolution:updateOutput(input)
   backCompatibility(self)
   viewWeight(self)
   input = makeContiguous(self, input)
   -- Delegate to the SpatialConvolutionMM backend, which expects 2D weights.
   local out = input.nn.SpatialConvolutionMM_updateOutput(self, input)
   unviewWeight(self)
   return out
end

function SpatialConvolution:updateGradInput(input, gradOutput)
   -- self.gradInput may be set to nil to skip this computation entirely.
   if self.gradInput then
      backCompatibility(self)
      viewWeight(self)
      input, gradOutput = makeContiguous(self, input, gradOutput)
      local out = input.nn.SpatialConvolutionMM_updateGradInput(self, input, gradOutput)
      unviewWeight(self)
      return out
   end
end

-- scale multiplies the parameter-gradient contribution accumulated here.
function SpatialConvolution:accGradParameters(input, gradOutput, scale)
   backCompatibility(self)
   input, gradOutput = makeContiguous(self, input, gradOutput)
   viewWeight(self)
   local out = input.nn.SpatialConvolutionMM_accGradParameters(self, input, gradOutput, scale)
   unviewWeight(self)
   return out
end

-- Reset the backend buffers before a type conversion so they are reallocated
-- with the new tensor type.
function SpatialConvolution:type(type)
   self.finput = torch.Tensor()
   self.fgradInput = torch.Tensor()
   return parent.type(self, type)
end

function SpatialConvolution:__tostring__()
   local s = string.format('%s(in: %d, out: %d, kW: %d, kH: %d', torch.type(self),
                           self.nInputPlane, self.nOutputPlane, self.kW, self.kH)
   if self.dW ~= 1 or self.dH ~= 1 then
      s = s .. string.format(', dW: %d, dH: %d', self.dW, self.dH)
   end
   if self.padding ~= 0 then
      s = s .. ', padding: ' .. self.padding
   end
   return s .. ')'
end

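--[[
A minimal usage sketch (not part of the original file; assumes a standard
torch/nn install):

   require 'nn'
   -- 3 input planes -> 16 output planes, 5x5 kernel, stride 1, no padding
   local conv = nn.SpatialConvolution(3, 16, 5, 5)
   local input = torch.randn(3, 32, 32)   -- CHW input
   local output = conv:forward(input)     -- 16 x 28 x 28
   print(conv)                            -- uses __tostring__ above
--]]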