2 changes: 2 additions & 0 deletions README.md
@@ -207,6 +207,8 @@ examples, in increasing level of complexity:
dense model from a Keras HDF5 file and running the inference.
6. [cnn_from_keras](example/cnn_from_keras.f90): Creating a pre-trained
convolutional model from a Keras HDF5 file and running the inference.
7. [get_set_network_params](example/get_set_network_params.f90): Getting and
setting network parameters (weights and biases).

The examples also show you the extent of the public API that's meant to be
used in applications, i.e. anything from the `nf` module.
1 change: 1 addition & 0 deletions example/CMakeLists.txt
@@ -3,6 +3,7 @@ foreach(execid
cnn_from_keras
dense_mnist
dense_from_keras
get_set_network_params
simple
sine
)
79 changes: 79 additions & 0 deletions example/get_set_network_params.f90
@@ -0,0 +1,79 @@
program get_set_network_params
use nf, only: dense, input, network
implicit none
type(network) :: net, net2
real :: x(1), y(1)
real, parameter :: pi = 4*atan(1.)
integer, parameter :: num_iterations = 100000
integer, parameter :: test_size = 30
real :: xtest(test_size), ytest(test_size), ypred(test_size)
integer :: i, n, nparam
real, allocatable :: parameters(:)

print '("Getting and setting network parameters")'
print '(60("="))'

net = network([ &
input(1), &
dense(5), &
dense(3), & ! extra layer for testing; not needed to solve this problem
dense(1) &
])

call net % print_info()

xtest = [((i - 1)*2*pi/test_size, i=1, test_size)]
ytest = (sin(xtest) + 1)/2

do n = 0, num_iterations

call random_number(x)
x = x*2*pi
y = (sin(x) + 1)/2

call net%forward(x)
call net%backward(y)
call net%update(1.)

if (mod(n, 10000) == 0) then
ypred = [(net % predict([xtest(i)]), i=1, test_size)]
print '(i0,1x,f9.6)', n, sum((ypred - ytest)**2)/size(ypred)
end if

end do

print *, ''
print '("Extract parameters")'
print *, ''

nparam = net % get_num_params()
print '("get_num_params = ", i0)', nparam

call net % get_params(parameters)

if (allocated(parameters)) then
print '("size(parameters) = ", i0)', size(parameters)
print *, 'parameters:', parameters
end if

net2 = network([ &
input(1), &
dense(5), &
dense(3), & ! extra layer for testing; not needed to solve this problem
dense(1) &
])

call net2 % print_info()

! copy the parameters from net to net2
call net2 % set_params(parameters)
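
! With identical architectures and parameters, net and net2 should
! now produce identical outputs on the test set.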

ypred = [(net % predict([xtest(i)]), i=1, test_size)]
print *, 'Original network test output:'
print *, ypred

ypred = [(net2 % predict([xtest(i)]), i=1, test_size)]
print *, 'Cloned network test output:'
print *, ypred

end program get_set_network_params
2 changes: 1 addition & 1 deletion fpm.toml
@@ -1,5 +1,5 @@
name = "neural-fortran"
version = "0.9.0"
version = "0.10.0"
license = "MIT"
author = "Milan Curcic"
maintainer = "[email protected]"
29 changes: 29 additions & 0 deletions src/nf/nf_conv2d_layer.f90
@@ -36,6 +36,9 @@ module nf_conv2d_layer
procedure :: init
procedure :: forward
procedure :: backward
procedure :: get_num_params
procedure :: get_params
procedure :: set_params
procedure :: set_activation
procedure :: update

@@ -82,6 +85,32 @@ pure module subroutine backward(self, input, gradient)
!! Gradient (next layer)
end subroutine backward

pure module function get_num_params(self) result(num_params)
!! Get the number of parameters in the layer.
class(conv2d_layer), intent(in) :: self
!! A `conv2d_layer` instance
integer :: num_params
!! Number of parameters
end function get_num_params

pure module subroutine get_params(self, params)
!! Get the parameters of the layer.
!! The parameters are ordered as kernel weights first, biases second.
class(conv2d_layer), intent(in) :: self
!! A `conv2d_layer` instance
real, allocatable, intent(in out) :: params(:)
!! Parameters to get
end subroutine get_params

module function set_params(self, params) result(consumed)
!! Set the parameters of the layer.
!! The parameters are expected ordered as kernel weights first, biases second.
class(conv2d_layer), intent(in out) :: self
!! A `conv2d_layer` instance
real, intent(in) :: params(:)
!! Parameters to set
integer :: consumed
!! Number of parameters consumed
end function set_params

elemental module subroutine set_activation(self, activation)
!! Set the activation functions.
class(conv2d_layer), intent(in out) :: self
54 changes: 54 additions & 0 deletions src/nf/nf_conv2d_layer_submodule.f90
@@ -188,6 +188,60 @@ pure module subroutine backward(self, input, gradient)

end subroutine backward


pure module function get_num_params(self) result(num_params)
class(conv2d_layer), intent(in) :: self
integer :: num_params
num_params = product(shape(self % kernel)) + size(self % biases)
end function get_num_params


pure module subroutine get_params(self, params)
class(conv2d_layer), intent(in) :: self
real, allocatable, intent(in out) :: params(:)

! build params by appending; assignment to the allocatable array
! reallocates it automatically

! first pack the kernel
if (allocated(params)) then
params = [params, pack(self % kernel, .true.)]
else
params = pack(self % kernel, .true.)
end if

! then pack the biases
params = [params, pack(self % biases, .true.)]

end subroutine get_params
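
A side note on the idiom above: `pack` flattens an array in column-major order, and assignment to an allocatable left-hand side reallocates it automatically (Fortran 2003), so parameters from successive layers can be appended to one growing vector. A minimal standalone sketch, not part of this PR, of the resulting layout:

program pack_append_demo
  ! Illustrates the pack-and-append idiom from get_params:
  ! pack() flattens column-major; assigning to an allocatable
  ! array reallocates it automatically.
  implicit none
  real :: kernel(2,2) = reshape([1., 2., 3., 4.], [2, 2])
  real :: biases(2) = [10., 20.]
  real, allocatable :: params(:)

  ! same ordering as get_params: kernel first, then biases
  if (allocated(params)) then
    params = [params, pack(kernel, .true.)]
  else
    params = pack(kernel, .true.)
  end if
  params = [params, pack(biases, .true.)]

  print *, params  ! prints 1. 2. 3. 4. 10. 20.
end program pack_append_demo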


module function set_params(self, params) result(consumed)
class(conv2d_layer), intent(in out) :: self
real, intent(in) :: params(:)
integer :: consumed

! Check that enough parameters were passed; extra trailing elements are allowed.
if (size(params) < self % get_num_params()) then
error stop 'Number of parameters does not match'
end if

! Reshape the kernel.
self % kernel = reshape( &
params(:product(shape(self % kernel))), &
shape(self % kernel) &
)

! Reshape the biases.
self % biases = reshape( &
params(product(shape(self % kernel)) + 1:), &
[self % filters] &
)

consumed = self%get_num_params()

end function set_params
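
One subtlety: the size check above uses `<`, not `/=`, so `params` may legitimately be longer than this layer needs, for instance when a caller passes the tail of a network-wide parameter vector. That works because `reshape` without a `pad` argument only requires the source to have at least as many elements as the target shape and ignores the rest. A tiny standalone sketch:

program reshape_prefix_demo
  ! reshape uses only the leading elements of a source that is
  ! longer than the requested shape; the trailing ones are ignored
  implicit none
  real :: flat(5) = [1., 2., 3., 4., 5.]
  real :: biases(3)
  biases = reshape(flat, [3])
  print *, biases  ! prints 1. 2. 3.
end program reshape_prefix_demo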


elemental module subroutine set_activation(self, activation)
class(conv2d_layer), intent(in out) :: self
character(*), intent(in) :: activation
31 changes: 31 additions & 0 deletions src/nf/nf_dense_layer.f90
@@ -36,6 +36,9 @@ module nf_dense_layer

procedure :: backward
procedure :: forward
procedure :: get_num_params
procedure :: get_params
procedure :: set_params
procedure :: init
procedure :: set_activation
procedure :: update
@@ -80,6 +83,34 @@ pure module subroutine forward(self, input)
!! Input from the previous layer
end subroutine forward

pure module function get_num_params(self) result(num_params)
!! Return the number of parameters in this layer.
class(dense_layer), intent(in) :: self
!! Dense layer instance
integer :: num_params
!! Number of parameters in this layer
end function get_num_params

pure module subroutine get_params(self, params)
!! Return the parameters of this layer.
!! The parameters are ordered as weights first, biases second.
class(dense_layer), intent(in) :: self
!! Dense layer instance
real, allocatable, intent(in out) :: params(:)
!! Parameters of this layer
end subroutine get_params

module function set_params(self, params) result(consumed)
!! Set the parameters of this layer.
!! The parameters are ordered as weights first, biases second.
class(dense_layer), intent(in out) :: self
!! Dense layer instance
real, intent(in) :: params(:)
!! Parameters of this layer
integer :: consumed
!! Number of parameters consumed
end function set_params

module subroutine init(self, input_shape)
!! Initialize the layer data structures.
!!
56 changes: 56 additions & 0 deletions src/nf/nf_dense_layer_submodule.f90
@@ -53,6 +53,62 @@ pure module subroutine forward(self, input)
end subroutine forward


pure module function get_num_params(self) result(num_params)
class(dense_layer), intent(in) :: self
integer :: num_params

! Number of weights plus number of biases
num_params = self % input_size * self % output_size + self % output_size

end function get_num_params
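
As a concrete check, the 1 → 5 → 3 → 1 network in the example program above has (1*5 + 5) + (5*3 + 3) + (3*1 + 1) = 10 + 18 + 4 = 32 parameters in total (the input layer holds none), which is the value `net % get_num_params()` should report there.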


pure module subroutine get_params(self, params)
class(dense_layer), intent(in) :: self
real, allocatable, intent(in out) :: params(:)

! build params by appending; assignment to the allocatable array
! reallocates it automatically

! first pack the weights
if (allocated(params)) then
params = [params, pack(self % weights, .true.)]
else
params = pack(self % weights, .true.)
end if

! then pack the biases
params = [params, pack(self % biases, .true.)]

end subroutine get_params


module function set_params(self, params) result(consumed)
class(dense_layer), intent(in out) :: self
real, intent(in) :: params(:)
integer :: consumed

! Check that enough parameters were passed; extra trailing elements are allowed.
if (size(params) < self % get_num_params()) then
error stop 'Error: number of parameters does not match'
end if

! reshape the weights
self % weights = reshape( &
params(:self % input_size * self % output_size), &
[self % input_size, self % output_size] &
)

! reshape the biases
self % biases = reshape( &
params(self % input_size * self % output_size + 1:), &
[self % output_size] &
)

consumed = self % get_num_params()

end function set_params
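
To make the layout concrete, here is a small standalone round-trip sketch (with illustrative sizes, not code from this PR) showing that packing the weights column-major followed by the biases, then reshaping as above, reproduces the originals:

program dense_roundtrip_demo
  implicit none
  integer, parameter :: n_in = 2, n_out = 3
  real :: weights(n_in, n_out), biases(n_out)
  real :: weights2(n_in, n_out), biases2(n_out)
  real, allocatable :: flat(:)

  call random_number(weights)
  call random_number(biases)

  ! pack as in get_params: weights first, biases second
  flat = [pack(weights, .true.), pack(biases, .true.)]

  ! unpack as in set_params
  weights2 = reshape(flat(:n_in*n_out), [n_in, n_out])
  biases2 = reshape(flat(n_in*n_out + 1:), [n_out])

  print *, 'weights restored:', all(weights2 == weights)
  print *, 'biases restored: ', all(biases2 == biases)
end program dense_roundtrip_demo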


module subroutine init(self, input_shape)
class(dense_layer), intent(in out) :: self
integer, intent(in) :: input_shape(:)
28 changes: 28 additions & 0 deletions src/nf/nf_layer.f90
@@ -25,6 +25,9 @@ module nf_layer
contains

procedure :: forward
procedure :: get_num_params
procedure :: get_params
procedure :: set_params
procedure :: init
procedure :: print_info
procedure :: update
@@ -117,6 +120,31 @@ impure elemental module subroutine print_info(self)
!! Layer instance
end subroutine print_info

pure module function get_num_params(self) result(num_params)
!! Returns the number of parameters in this layer.
class(layer), intent(in) :: self
!! Layer instance
integer :: num_params
!! Number of parameters in this layer
end function get_num_params

pure module subroutine get_params(self, params)
!! Returns the parameters of this layer.
class(layer), intent(in) :: self
!! Layer instance
real, allocatable, intent(in out) :: params(:)
!! Parameters of this layer
end subroutine get_params

impure module function set_params(self, params) result(consumed)
!! Set the parameters of this layer.
class(layer), intent(in out) :: self
!! Layer instance
real, intent(in) :: params(:)
!! Parameters of this layer
integer :: consumed
!! Number of parameters consumed
end function set_params
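
The `consumed` result suggests the intended calling convention: the network hands each layer the unread tail of a single flat parameter vector and advances an offset by the count the layer reports back. A standalone sketch of that pattern, with mock layer sizes in place of real layers (the driver loop here is illustrative, not code from this PR):

program consumed_protocol_demo
  implicit none
  real :: params(5) = [1., 2., 3., 4., 5.]
  integer :: sizes(2) = [3, 2]  ! mock layers: 3 and 2 parameters each
  integer :: offset, n

  offset = 1
  do n = 1, size(sizes)
    ! a real layer would reshape params(offset:) into its weights
    ! and biases and return how many elements it used
    print *, 'layer', n, 'consumes:', params(offset:offset + sizes(n) - 1)
    offset = offset + sizes(n)
  end do
end program consumed_protocol_demo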

impure elemental module subroutine update(self, learning_rate)
!! Update the weights and biases on the layer using the stored
!! gradients (from backward passes), and flush those same stored