Commit 6bbc28d

Connect flatten, conv2d, and maxpool2d layers in backward pass (#142)

* Connect flatten, conv2d, and maxpool2d layers in backward pass

* Bump minor version
milancurcic authored Jun 22, 2023
1 parent 31fc061 commit 6bbc28d
Showing 2 changed files with 12 additions and 6 deletions.
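
For context, the sketch below shows the kind of model whose backward pass this commit completes: a network whose hidden layers include conv2d, maxpool2d, and flatten. This is a hypothetical usage example, not code from the repository; the layer constructors are exported by the nf module, but the argument names and input layout shown are assumptions about the API around this release.

program cnn_backward_demo
  ! Hypothetical sketch: constructor signatures and the input layout
  ! are assumed, not verified against this exact version.
  use nf, only: network, input, conv2d, maxpool2d, flatten, dense

  type(network) :: net
  real :: x(3, 32, 32)  ! assumed channels-first sample layout
  real :: y(10)

  net = network([ &
    input(3, 32, 32), &
    conv2d(filters=8, kernel_size=3), &
    maxpool2d(pool_size=2), &
    flatten(), &
    dense(10) &
  ])

  call random_number(x)
  y = 0
  y(1) = 1

  call net % forward(x)
  ! Before this commit, only dense hidden layers received gradients
  ! here; flatten, conv2d, and maxpool2d are now connected as well.
  call net % backward(y)

end program cnn_backward_demo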
fpm.toml (1 addition, 1 deletion)
@@ -1,5 +1,5 @@
 name = "neural-fortran"
-version = "0.12.0"
+version = "0.13.0"
 license = "MIT"
 author = "Milan Curcic"
 maintainer = "[email protected]"
src/nf/nf_network_submodule.f90 (11 additions, 5 deletions)
@@ -283,7 +283,6 @@ end function get_activation_by_name
   pure module subroutine backward(self, output)
     class(network), intent(in out) :: self
     real, intent(in) :: output(:)
-    real, allocatable :: gradient(:)
     integer :: n, num_layers
 
     num_layers = size(self % layers)
@@ -296,18 +295,25 @@ pure module subroutine backward(self, output)
         ! Output layer; apply the loss function
         select type(this_layer => self % layers(n) % p)
           type is(dense_layer)
-            gradient = quadratic_derivative(output, this_layer % output)
+            call self % layers(n) % backward( &
+              self % layers(n - 1), &
+              quadratic_derivative(output, this_layer % output) &
+            )
         end select
       else
         ! Hidden layer; take the gradient from the next layer
         select type(next_layer => self % layers(n + 1) % p)
           type is(dense_layer)
-            gradient = next_layer % gradient
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is(flatten_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is(conv2d_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is(maxpool2d_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
         end select
       end if
 
-      call self % layers(n) % backward(self % layers(n - 1), gradient)
-
     end do
 
   end subroutine backward
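
A note on the dispatch pattern above: the four hidden-layer branches look identical, but none of them is redundant. The gradient component lives on each concrete layer type rather than on the abstract base type, so each type is guard is what makes next_layer % gradient legal to reference; and since the gradient's rank can differ between layer types (rank 1 for dense, presumably rank 3 for conv2d), each branch may also resolve to a different specific procedure under the generic backward binding. A minimal, self-contained sketch of the idiom, using hypothetical types that are not from the repository:

module layer_demo
  ! Demonstrates select type dispatch to reach a component that is
  ! defined only on concrete extensions of an abstract base type.
  implicit none

  type, abstract :: base_layer
  end type base_layer

  type, extends(base_layer) :: dense_layer
    real, allocatable :: gradient(:)
  end type dense_layer

  type, extends(base_layer) :: flatten_layer
    real, allocatable :: gradient(:)
  end type flatten_layer

contains

  pure function next_gradient(next) result(g)
    ! A single branch on base_layer would not compile, because
    ! gradient is not a component of the abstract type.
    class(base_layer), intent(in) :: next
    real, allocatable :: g(:)
    select type (next)
      type is (dense_layer)
        g = next % gradient
      type is (flatten_layer)
        g = next % gradient
    end select
  end function next_gradient

end module layer_demo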
