Applies a backward propagation through the network and returns the weight and bias gradients.
| Type | Intent | Optional | Attributes | Name |
|---|---|---|---|---|
| class(network_type) | intent(inout) | | | self |
| real(kind=rk) | intent(in) | | | y(:) |
| type(array2d) | intent(out) | | allocatable | dw(:) |
| type(array1d) | intent(out) | | allocatable | db(:) |
pure subroutine backprop(self, y, dw, db)
  !! Applies a backward propagation through the network
  !! and returns the weight and bias gradients.
  class(network_type), intent(in out) :: self
  real(rk), intent(in) :: y(:)
    !! Expected (target) output values for the output layer
  type(array2d), allocatable, intent(out) :: dw(:)
    !! Weight gradients, one array per layer
  type(array1d), allocatable, intent(out) :: db(:)
    !! Bias gradients, one array per layer
  integer(ik) :: n
  associate(dims => self % dims, layers => self % layers)
    call db_init(db, dims)
    call dw_init(dw, dims)
    n = size(dims)
    ! Output layer: delta = (a - y) * activation'(z)
    db(n) % array = (layers(n) % a - y) * layers(n) % activation_prime(layers(n) % z)
    dw(n-1) % array = matmul(reshape(layers(n-1) % a, [dims(n-1), 1]),&
                             reshape(db(n) % array, [1, dims(n)]))
    ! Hidden layers: propagate deltas backward through the weights,
    ! delta(n) = (W(n) . delta(n+1)) * activation'(z(n))
    do n = size(dims) - 1, 2, -1
      db(n) % array = matmul(layers(n) % w, db(n+1) % array)&
                    * layers(n) % activation_prime(layers(n) % z)
      dw(n-1) % array = matmul(reshape(layers(n-1) % a, [dims(n-1), 1]),&
                               reshape(db(n) % array, [1, dims(n)]))
    end do
  end associate
end subroutine backprop