15 changes: 15 additions & 0 deletions src/nf/nf_activation_1d.f90
@@ -10,6 +10,7 @@ module nf_activation_1d
public :: elu, elu_prime
public :: exponential
public :: gaussian, gaussian_prime
public :: linear, linear_prime
public :: relu, relu_prime
public :: sigmoid, sigmoid_prime
public :: softmax, softmax_prime
@@ -72,6 +73,20 @@ pure function gaussian_prime(x) result(res)
res = -2 * x * gaussian(x)
end function gaussian_prime

pure function linear(x) result(res)
!! Linear activation function.
real, intent(in) :: x(:)
real :: res(size(x))
res = x
end function linear

pure function linear_prime(x) result(res)
!! First derivative of the linear activation function.
real, intent(in) :: x(:)
real :: res(size(x))
res = 1
end function linear_prime

pure function relu(x) result(res)
!! Rectified Linear Unit (ReLU) activation function.
real, intent(in) :: x(:)
15 changes: 15 additions & 0 deletions src/nf/nf_activation_3d.f90
@@ -10,6 +10,7 @@ module nf_activation_3d
public :: elu, elu_prime
public :: exponential
public :: gaussian, gaussian_prime
public :: linear, linear_prime
public :: relu, relu_prime
public :: sigmoid, sigmoid_prime
public :: softmax, softmax_prime
@@ -72,6 +73,20 @@ pure function gaussian_prime(x) result(res)
res = -2 * x * gaussian(x)
end function gaussian_prime

pure function linear(x) result(res)
!! Linear activation function.
real, intent(in) :: x(:,:,:)
real :: res(size(x,1),size(x,2),size(x,3))
res = x
end function linear

pure function linear_prime(x) result(res)
!! First derivative of the linear activation function.
real, intent(in) :: x(:,:,:)
real :: res(size(x,1),size(x,2),size(x,3))
res = 1
end function linear_prime

pure function relu(x) result(res)
!! Rectified Linear Unit (ReLU) activation function.
real, intent(in) :: x(:,:,:)
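
For a quick sanity check of the new functions, here is a minimal standalone sketch that assumes only the module and public names shown in the hunks above (the 3-D variant in nf_activation_3d behaves the same way elementwise):

program test_linear_activation
  ! Minimal sketch: exercises the new linear activation and its derivative
  ! from nf_activation_1d, as added in this diff.
  use nf_activation_1d, only: linear, linear_prime
  implicit none
  real :: x(4)
  x = [-2., -0.5, 0., 3.]
  print *, linear(x)        ! identity: returns x unchanged
  print *, linear_prime(x)  ! derivative is 1 everywhere
end program test_linear_activation
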
11 changes: 8 additions & 3 deletions src/nf/nf_conv2d_layer_submodule.f90
@@ -4,6 +4,7 @@
elu, elu_prime, &
exponential, &
gaussian, gaussian_prime, &
linear, linear_prime, &
relu, relu_prime, &
sigmoid, sigmoid_prime, &
softmax, softmax_prime, &
@@ -254,6 +255,11 @@ elemental module subroutine set_activation(self, activation)
self % activation_prime => gaussian_prime
self % activation_name = 'gaussian'

case('linear')
self % activation => linear
self % activation_prime => linear_prime
self % activation_name = 'linear'

case('relu')
self % activation => relu
self % activation_prime => relu_prime
@@ -286,9 +292,8 @@ elemental module subroutine set_activation(self, activation)

case default
error stop 'Activation must be one of: ' // &
'"elu", "exponential", "gaussian", "relu", ' // &
'"sigmoid", "softmax", "softplus", "step", ' // &
'or "tanh".'
'"elu", "exponential", "gaussian", "linear", "relu", "sigmoid", ' // &
'"softmax", "softplus", "step", or "tanh".'

end select

11 changes: 8 additions & 3 deletions src/nf/nf_dense_layer_submodule.f90
@@ -4,6 +4,7 @@
elu, elu_prime, &
exponential, &
gaussian, gaussian_prime, &
linear, linear_prime, &
relu, relu_prime, &
sigmoid, sigmoid_prime, &
softmax, softmax_prime, &
@@ -157,6 +158,11 @@ elemental module subroutine set_activation(self, activation)
self % activation_prime => gaussian_prime
self % activation_name = 'gaussian'

case('linear')
self % activation => linear
self % activation_prime => linear_prime
self % activation_name = 'linear'

case('relu')
self % activation => relu
self % activation_prime => relu_prime
@@ -189,9 +195,8 @@ elemental module subroutine set_activation(self, activation)

case default
error stop 'Activation must be one of: ' // &
'"elu", "exponential", "gaussian", "relu", ' // &
'"sigmoid", "softmax", "softplus", "step", ' // &
'or "tanh".'
'"elu", "exponential", "gaussian", "linear", "relu", "sigmoid", ' // &
'"softmax", "softplus", "step", or "tanh".'

end select

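
With the new case in place, 'linear' joins the names accepted by set_activation in both the conv2d and dense layer submodules. A hedged sketch of how that dispatch is reached is below; only set_activation itself and activation_name appear in this diff, so the dense_layer type name and its host module are assumptions:

program pick_linear_activation
  ! Sketch under assumptions: the dense_layer type and the nf_dense_layer
  ! module name are not shown in this diff; only set_activation is.
  use nf_dense_layer, only: dense_layer
  implicit none
  type(dense_layer) :: layer
  call layer % set_activation('linear')  ! binds activation => linear, activation_prime => linear_prime
  print *, layer % activation_name       ! expected: 'linear'
end program pick_linear_activation

Passing an unrecognized name (for example 'identity') would instead hit the updated error stop message, which now lists "linear" among the valid choices.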