nf_layer_constructors.f90 (forked from modern-fortran/neural-fortran)
module nf_layer_constructors

  !! This module provides the functions to instantiate specific layers.
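  !!
  !! The constructors below are typically used together to assemble a
  !! network. A minimal sketch (assumes the `network` constructor exported
  !! by the `nf` front-end module):
  !!
  !! ```
  !! use nf, only: network, input, dense
  !! type(network) :: net
  !! net = network([input(784), dense(32), dense(10)])
  !! ```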

  use nf_layer, only: layer
  use nf_activation, only: activation_function

  implicit none

  private
  public :: conv2d, dense, flatten, input, maxpool2d, reshape

  interface input

    pure module function input1d(layer_size) result(res)
      !! 1-d input layer constructor.
      !!
      !! This layer is for inputting 1-d data to the network.
      !! Currently, this layer must be followed by a dense layer.
      !! An input layer must be the first layer in the network.
      !!
      !! This is a specific function that is available
      !! under a generic name `input`.
      !!
      !! Example:
      !!
      !! ```
      !! use nf, only: input, layer
      !! type(layer) :: input_layer
      !! input_layer = input(768)
      !! ```
      integer, intent(in) :: layer_size
        !! Size of the input layer
      type(layer) :: res
        !! Resulting layer instance
    end function input1d

    pure module function input3d(layer_shape) result(res)
      !! 3-d input layer constructor.
      !!
      !! This layer is for inputting 3-d data to the network.
      !! Currently, this layer must be followed by a conv2d layer.
      !! An input layer must be the first layer in the network.
      !!
      !! This is a specific function that is available
      !! under a generic name `input`.
      !!
      !! Example:
      !!
      !! ```
      !! use nf, only: input, layer
      !! type(layer) :: input_layer
      !! input_layer = input([28, 28, 1])
      !! ```
      integer, intent(in) :: layer_shape(3)
        !! Shape of the input layer
      type(layer) :: res
        !! Resulting layer instance
    end function input3d

  end interface input

  interface

    pure module function dense(layer_size, activation) result(res)
      !! Dense (fully-connected) layer constructor.
      !!
      !! This layer is a building block for dense, fully-connected networks,
      !! or for an output layer of a convolutional network.
      !! A dense layer must not be the first layer in the network.
      !!
      !! Example:
      !!
      !! ```
      !! use nf, only: dense, layer, relu
      !! type(layer) :: dense_layer
      !! dense_layer = dense(10)
      !! dense_layer = dense(10, activation=relu())
      !! ```
      integer, intent(in) :: layer_size
        !! The number of neurons in a dense layer
      class(activation_function), intent(in), optional :: activation
        !! Activation function instance (default sigmoid)
      type(layer) :: res
        !! Resulting layer instance
    end function dense

    pure module function flatten() result(res)
      !! Flatten (3-d -> 1-d) layer constructor.
      !!
      !! Use this layer to chain layers with 3-d outputs to layers with 1-d
      !! inputs. For example, to chain a `conv2d` or a `maxpool2d` layer
      !! with a `dense` layer for a CNN for classification, place a `flatten`
      !! layer between them.
      !!
      !! A flatten layer must not be the first layer in the network.
      !!
      !! Example:
      !!
      !! ```
      !! use nf, only: flatten, layer
      !! type(layer) :: flatten_layer
      !! flatten_layer = flatten()
      !! ```
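      !!
      !! In a network, place the `flatten` layer between the last 3-d layer
      !! and the first `dense` layer, for example (a minimal sketch; assumes
      !! the `network` constructor exported by the `nf` front-end module):
      !!
      !! ```
      !! net = network([ &
      !!   input([28, 28, 1]), &
      !!   conv2d(filters=8, kernel_size=3), &
      !!   maxpool2d(pool_size=2), &
      !!   flatten(), &
      !!   dense(10) &
      !! ])
      !! ```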
      type(layer) :: res
        !! Resulting layer instance
    end function flatten

    pure module function conv2d(filters, kernel_size, activation) result(res)
      !! 2-d convolutional layer constructor.
      !!
      !! This layer is for building 2-d convolutional networks.
      !! Although the established convention is to call these layers 2-d,
      !! the shape of the data is actually 3-d: image width, image height,
      !! and the number of channels.
      !! A conv2d layer must not be the first layer in the network.
      !!
      !! Example:
      !!
      !! ```
      !! use nf, only: conv2d, layer, relu
      !! type(layer) :: conv2d_layer
      !! conv2d_layer = conv2d(filters=32, kernel_size=3)
      !! conv2d_layer = conv2d(filters=32, kernel_size=3, activation=relu())
      !! ```
      integer, intent(in) :: filters
        !! Number of filters in the output of the layer
      integer, intent(in) :: kernel_size
        !! Width of the convolution window, commonly 3 or 5
      class(activation_function), intent(in), optional :: activation
        !! Activation function (default sigmoid)
      type(layer) :: res
        !! Resulting layer instance
    end function conv2d

    pure module function maxpool2d(pool_size, stride) result(res)
      !! 2-d maxpooling layer constructor.
      !!
      !! This layer is for downscaling other layers, typically `conv2d`.
      !!
      !! Example:
      !!
      !! ```
      !! use nf, only: maxpool2d, layer
      !! type(layer) :: maxpool2d_layer
      !! maxpool2d_layer = maxpool2d(pool_size=2)
      !! maxpool2d_layer = maxpool2d(pool_size=2, stride=3)
      !! ```
      integer, intent(in) :: pool_size
        !! Width of the pooling window, commonly 2
      integer, intent(in), optional :: stride
        !! Stride of the pooling window, commonly equal to `pool_size`;
        !! defaults to `pool_size` if omitted.
      type(layer) :: res
        !! Resulting layer instance
    end function maxpool2d

    pure module function reshape(output_shape) result(res)
      !! Rank-1 to rank-any reshape layer constructor.
      !! Currently, only a rank-3 output shape is implemented.
      !!
      !! This layer is for connecting 1-d inputs to conv2d or similar layers.
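      !!
      !! Example (a minimal sketch; the target shape here is illustrative):
      !!
      !! ```
      !! use nf, only: reshape, layer
      !! type(layer) :: reshape_layer
      !! reshape_layer = reshape([28, 28, 1])
      !! ```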
      integer, intent(in) :: output_shape(:)
        !! Shape of the output
      type(layer) :: res
        !! Resulting layer instance
    end function reshape

  end interface

end module nf_layer_constructors