2 changes: 2 additions & 0 deletions README.md
@@ -207,6 +207,8 @@ examples, in increasing level of complexity:
dense model from a Keras HDF5 file and running the inference.
6. [cnn_from_keras](example/cnn_from_keras.f90): Creating a pre-trained
convolutional model from a Keras HDF5 file and running the inference.
7. [get_set_network_params](example/get_set_network_params.f90): Getting and
setting the parameters (weights and biases) of a network.

The examples also show you the extent of the public API that's meant to be
used in applications, i.e. anything from the `nf` module.
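
For a quick sense of the API surface this PR adds, here is a minimal sketch (the method names come from the example program below; `net` stands for any `network` instance built from the `nf` module):

  real, allocatable :: params(:)
  print *, net % get_num_params()   ! total number of weights and biases
  params = net % get_params()       ! all weights and biases as a flat rank-1 array
  call net % set_params(params)     ! size must equal net % get_num_params()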
1 change: 1 addition & 0 deletions example/CMakeLists.txt
@@ -3,6 +3,7 @@ foreach(execid
cnn_from_keras
dense_mnist
dense_from_keras
get_set_network_params
simple
sine
)
92 changes: 92 additions & 0 deletions example/get_set_network_params.f90
@@ -0,0 +1,92 @@
program get_set_network_params
use nf, only: dense, input, network
implicit none
type(network) :: net1, net2
real :: x(1), y(1)
real, parameter :: pi = 4 * atan(1.)
integer, parameter :: num_iterations = 100000
integer, parameter :: test_size = 30
real :: xtest(test_size), ytest(test_size)
real :: ypred1(test_size), ypred2(test_size)
integer :: i, n, nparam
real, allocatable :: parameters(:)

print '("Getting and setting network parameters")'
print '(60("="))'
print *
print '(a)', 'First, let''s instantiate small dense network net1'
print '(a)', 'of shape (1,5,1) and fit it to a sine function:'
print *

net1 = network([ &
input(1), &
dense(5), &
dense(1) &
])

call net1 % print_info()

xtest = [((i - 1) * 2 * pi / test_size, i=1, test_size)]
ytest = (sin(xtest) + 1) / 2

do n = 0, num_iterations

call random_number(x)
x = x * 2 * pi
y = (sin(x) + 1) / 2

call net1 % forward(x)
call net1 % backward(y)
call net1 % update(1.)

if (mod(n, 10000) == 0) then
ypred1 = [(net1 % predict([xtest(i)]), i=1, test_size)]
print '(a,i0,1x,f9.6)', 'Number of iterations, loss: ', &
n, sum((ypred1 - ytest)**2) / size(ypred1)
end if

end do

print *
print '(a)', 'Now, let''s see how many network parameters there are'
print '(a)', 'by printing the result of net1 % get_num_params():'
print *
print '("net1 % get_num_params() = ", i0)', net1 % get_num_params()
print *
print '(a)', 'We can see the values of the network parameters'
print '(a)', 'by printing the result of net1 % get_params():'
print *
print '("net1 % get_params() = ", *(g0,1x))', net1 % get_params()
print *
print '(a)', 'Now, let''s create another network of the same shape and set'
print '(a)', 'the parameters from the original network to it'
print '(a)', 'with call net2 % set_params(net1 % get_params()):'

net2 = network([ &
input(1), &
dense(5), &
dense(1) &
])

! Copy the parameters from net1 to net2.
call net2 % set_params(net1 % get_params())

print *
print '(a)', 'We can check that the second network now has the same'
print '(a)', 'parameters as net1:'
print *
print '("net2 % get_params() = ", *(g0,1x))', net2 % get_params()

ypred1 = [(net1 % predict([xtest(i)]), i=1, test_size)]
ypred2 = [(net2 % predict([xtest(i)]), i=1, test_size)]

print *
print '(a)', 'We can also check that the two networks produce the same output:'
print *
print '("net1 output: ", *(g0,1x))', ypred1
print '("net2 output: ", *(g0,1x))', ypred2

print *
print '(a, l1)', 'Original and cloned network outputs match: ', all(ypred1 == ypred2)

end program get_set_network_params
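
Two notes on the example: the exact comparison all(ypred1 == ypred2) is deliberate, because net2 receives bit-identical copies of net1's weights and biases, so both forward passes produce identical floating-point results. Assuming fpm's example auto-discovery is enabled for this project, the program should be runnable with fpm run --example get_set_network_params.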
2 changes: 1 addition & 1 deletion fpm.toml
@@ -1,5 +1,5 @@
name = "neural-fortran"
version = "0.9.0"
version = "0.10.0"
license = "MIT"
author = "Milan Curcic"
maintainer = "[email protected]"
27 changes: 27 additions & 0 deletions src/nf/nf_conv2d_layer.f90
@@ -36,6 +36,9 @@ module nf_conv2d_layer
procedure :: init
procedure :: forward
procedure :: backward
procedure :: get_num_params
procedure :: get_params
procedure :: set_params
procedure :: set_activation
procedure :: update

@@ -82,6 +85,30 @@ pure module subroutine backward(self, input, gradient)
!! Gradient (next layer)
end subroutine backward

pure module function get_num_params(self) result(num_params)
!! Get the number of parameters in the layer.
class(conv2d_layer), intent(in) :: self
!! A `conv2d_layer` instance
integer :: num_params
!! Number of parameters
end function get_num_params

pure module function get_params(self) result(params)
!! Get the parameters of the layer.
class(conv2d_layer), intent(in) :: self
!! A `conv2d_layer` instance
real, allocatable :: params(:)
!! Parameters to get
end function get_params

module subroutine set_params(self, params)
!! Set the parameters of the layer.
class(conv2d_layer), intent(in out) :: self
!! A `conv2d_layer` instance
real, intent(in) :: params(:)
!! Parameters to set
end subroutine set_params

elemental module subroutine set_activation(self, activation)
!! Set the activation functions.
class(conv2d_layer), intent(in out) :: self
44 changes: 44 additions & 0 deletions src/nf/nf_conv2d_layer_submodule.f90
@@ -188,6 +188,50 @@ pure module subroutine backward(self, input, gradient)

end subroutine backward


pure module function get_num_params(self) result(num_params)
class(conv2d_layer), intent(in) :: self
integer :: num_params
num_params = product(shape(self % kernel)) + size(self % biases)
end function get_num_params


pure module function get_params(self) result(params)
class(conv2d_layer), intent(in) :: self
real, allocatable :: params(:)

params = [ &
pack(self % kernel, .true.), &
pack(self % biases, .true.) &
]

end function get_params


module subroutine set_params(self, params)
class(conv2d_layer), intent(in out) :: self
real, intent(in) :: params(:)

! Check that the number of parameters is correct.
if (size(params) /= self % get_num_params()) then
error stop 'conv2d % set_params: Number of parameters does not match'
end if

! Reshape the kernel.
self % kernel = reshape( &
params(:product(shape(self % kernel))), &
shape(self % kernel) &
)

! Reshape the biases.
self % biases = reshape( &
params(product(shape(self % kernel)) + 1:), &
[self % filters] &
)

end subroutine set_params


elemental module subroutine set_activation(self, activation)
class(conv2d_layer), intent(in out) :: self
character(*), intent(in) :: activation
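
An aside on the implementation: get_params flattens arrays with pack in column-major element order, and set_params restores shapes with reshape, which consumes elements in the same order, so the two are exact inverses. A minimal standalone sketch of that round trip (not part of this PR; the (2, 1, 3, 3) kernel shape is hypothetical):

  program pack_roundtrip
    implicit none
    real :: kernel(2, 1, 3, 3), biases(2)
    real :: kernel2(2, 1, 3, 3), biases2(2)
    real, allocatable :: params(:)

    call random_number(kernel)
    call random_number(biases)

    ! Flatten kernel first, then biases, mirroring conv2d % get_params.
    params = [pack(kernel, .true.), pack(biases, .true.)]
    print '(a, i0)', 'num_params = ', size(params)  ! 2*1*3*3 + 2 = 20

    ! Undo the flattening; reshape fills in the same column-major order as pack.
    kernel2 = reshape(params(:size(kernel)), shape(kernel))
    biases2 = params(size(kernel) + 1:)
    print '(a, l1)', 'round trip exact: ', &
      all(kernel2 == kernel) .and. all(biases2 == biases)
  end program pack_roundtrip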
29 changes: 29 additions & 0 deletions src/nf/nf_dense_layer.f90
@@ -36,6 +36,9 @@ module nf_dense_layer

procedure :: backward
procedure :: forward
procedure :: get_num_params
procedure :: get_params
procedure :: set_params
procedure :: init
procedure :: set_activation
procedure :: update
@@ -80,6 +83,32 @@ pure module subroutine forward(self, input)
!! Input from the previous layer
end subroutine forward

pure module function get_num_params(self) result(num_params)
!! Return the number of parameters in this layer.
class(dense_layer), intent(in) :: self
!! Dense layer instance
integer :: num_params
!! Number of parameters in this layer
end function get_num_params

pure module function get_params(self) result(params)
!! Return the parameters of this layer.
!! The parameters are ordered as weights first, biases second.
class(dense_layer), intent(in) :: self
!! Dense layer instance
real, allocatable :: params(:)
!! Parameters of this layer
end function get_params

module subroutine set_params(self, params)
!! Set the parameters of this layer.
!! The parameters are ordered as weights first, biases second.
class(dense_layer), intent(in out) :: self
!! Dense layer instance
real, intent(in) :: params(:)
!! Parameters of this layer
end subroutine set_params

module subroutine init(self, input_shape)
!! Initialize the layer data structures.
!!
46 changes: 46 additions & 0 deletions src/nf/nf_dense_layer_submodule.f90
@@ -53,6 +53,52 @@ pure module subroutine forward(self, input)
end subroutine forward


pure module function get_num_params(self) result(num_params)
class(dense_layer), intent(in) :: self
integer :: num_params

! Number of weights plus number of biases
num_params = self % input_size * self % output_size + self % output_size

end function get_num_params


pure module function get_params(self) result(params)
class(dense_layer), intent(in) :: self
real, allocatable :: params(:)

params = [ &
pack(self % weights, .true.), &
pack(self % biases, .true.) &
]

end function get_params


module subroutine set_params(self, params)
class(dense_layer), intent(in out) :: self
real, intent(in) :: params(:)

! Check that the number of parameters is correct.
if (size(params) /= self % get_num_params()) then
error stop 'dense_layer % set_params: Number of parameters does not match'
end if

! Reshape the weights.
self % weights = reshape( &
params(:self % input_size * self % output_size), &
[self % input_size, self % output_size] &
)

! Reshape the biases.
self % biases = reshape( &
params(self % input_size * self % output_size + 1:), &
[self % output_size] &
)

end subroutine set_params


module subroutine init(self, input_shape)
class(dense_layer), intent(in out) :: self
integer, intent(in) :: input_shape(:)
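
A quick sanity check of this formula against the example program above: the (1,5,1) network has one dense layer with input_size 1 and output_size 5 (1*5 + 5 = 10 parameters) and one with input_size 5 and output_size 1 (5*1 + 1 = 6), so net1 % get_num_params() should report 16, assuming the input layer contributes no parameters.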
27 changes: 27 additions & 0 deletions src/nf/nf_layer.f90
@@ -25,6 +25,9 @@ module nf_layer
contains

procedure :: forward
procedure :: get_num_params
procedure :: get_params
procedure :: set_params
procedure :: init
procedure :: print_info
procedure :: update
@@ -117,6 +120,30 @@ impure elemental module subroutine print_info(self)
!! Layer instance
end subroutine print_info

elemental module function get_num_params(self) result(num_params)
!! Returns the number of parameters in this layer.
class(layer), intent(in) :: self
!! Layer instance
integer :: num_params
!! Number of parameters in this layer
end function get_num_params

pure module function get_params(self) result(params)
!! Returns the parameters of this layer.
class(layer), intent(in) :: self
!! Layer instance
real, allocatable :: params(:)
!! Parameters of this layer
end function get_params

module subroutine set_params(self, params)
!! Set the parameters of this layer.
class(layer), intent(in out) :: self
!! Layer instance
real, intent(in) :: params(:)
!! Parameters of this layer
end subroutine set_params

impure elemental module subroutine update(self, learning_rate)
!! Update the weights and biases on the layer using the stored
!! gradients (from backward passes), and flush those same stored
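
Because get_num_params is elemental on the layer type, the network-level count can presumably be written as a single reduction over the layer array, e.g. (a sketch, not necessarily the PR's actual implementation):

  nparam = sum(self % layers % get_num_params())

where self % layers is the network's array of layer instances.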