diff --git a/CMakeLists.txt b/CMakeLists.txt
index 667b5d73..da6c1025 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -43,8 +43,8 @@ add_library(neural-fortran
   src/nf/nf_layernorm_submodule.f90
   src/nf/nf_layer.f90
   src/nf/nf_layer_submodule.f90
-  src/nf/nf_locally_connected1d_layer_submodule.f90
-  src/nf/nf_locally_connected1d_layer.f90
+  src/nf/nf_locally_connected2d_layer_submodule.f90
+  src/nf/nf_locally_connected2d_layer.f90
   src/nf/nf_linear2d_layer.f90
   src/nf/nf_linear2d_layer_submodule.f90
   src/nf/nf_embedding_layer.f90
diff --git a/README.md b/README.md
index e35673e6..5dbda06b 100644
--- a/README.md
+++ b/README.md
@@ -33,7 +33,7 @@ Read the paper [here](https://arxiv.org/abs/1902.06714).
 | Embedding | `embedding` | n/a | 2 | ✅ | ✅ |
 | Dense (fully-connected) | `dense` | `input1d`, `dense`, `dropout`, `flatten` | 1 | ✅ | ✅ |
 | Dropout | `dropout` | `dense`, `flatten`, `input1d` | 1 | ✅ | ✅ |
-| Locally connected (1-d) | `locally_connected` | `input`, `locally_connected`, `conv`, `maxpool`, `reshape` | 2 | ✅ | ✅ |
+| Locally connected (2-d) | `locally_connected` | `input`, `locally_connected`, `conv`, `maxpool`, `reshape` | 2 | ✅ | ✅ |
 | Convolutional (1-d and 2-d) | `conv` | `input`, `conv`, `maxpool`, `reshape` | 2, 3 | ✅ | ✅ |
 | Max-pooling (1-d and 2-d) | `maxpool` | `input`, `conv`, `maxpool`, `reshape` | 2, 3 | ✅ | ✅ |
 | Linear (2-d) | `linear2d` | `input2d`, `layernorm`, `linear2d`, `self_attention` | 2 | ✅ | ✅ |
diff --git a/src/nf/nf_layer_constructors.f90 b/src/nf/nf_layer_constructors.f90
index 80860bdf..12492311 100644
--- a/src/nf/nf_layer_constructors.f90
+++ b/src/nf/nf_layer_constructors.f90
@@ -156,7 +156,7 @@ end function conv2d
 
   interface locally_connected
 
-    module function locally_connected1d(filters, kernel_size, activation) result(res)
+    module function locally_connected2d(filters, kernel_size, activation) result(res)
       !! 1-d locally connected network constructor
       !!
       !! This layer is for building 1-d locally connected network.
@@ -168,10 +168,10 @@ module function locally_connected1d(filters, kernel_size, activation) result(res
       !! Example:
       !!
       !! ```
-      !! use nf, only :: locally_connected1d, layer
-      !! type(layer) :: locally_connected1d_layer
-      !! locally_connected1d_layer = dense(filters=32, kernel_size=3)
-      !! locally_connected1d_layer = dense(filters=32, kernel_size=3, activation='relu')
+      !! use nf, only :: locally_connected2d, layer
+      !! type(layer) :: locally_connected2d_layer
+      !! locally_connected2d_layer = dense(filters=32, kernel_size=3)
+      !! locally_connected2d_layer = dense(filters=32, kernel_size=3, activation='relu')
       !! ```
       integer, intent(in) :: filters
         !! Number of filters in the output of the layer
@@ -181,7 +181,7 @@ module function locally_connected1d(filters, kernel_size, activation) result(res
         !! Activation function (default sigmoid)
       type(layer) :: res
         !! Resulting layer instance
-    end function locally_connected1d
+    end function locally_connected2d
 
   end interface locally_connected
 
diff --git a/src/nf/nf_layer_constructors_submodule.f90 b/src/nf/nf_layer_constructors_submodule.f90
index 7918ee1c..23bb2284 100644
--- a/src/nf/nf_layer_constructors_submodule.f90
+++ b/src/nf/nf_layer_constructors_submodule.f90
@@ -9,7 +9,7 @@
   use nf_input1d_layer, only: input1d_layer
   use nf_input2d_layer, only: input2d_layer
   use nf_input3d_layer, only: input3d_layer
-  use nf_locally_connected1d_layer, only: locally_connected1d_layer
+  use nf_locally_connected2d_layer, only: locally_connected2d_layer
   use nf_maxpool1d_layer, only: maxpool1d_layer
   use nf_maxpool2d_layer, only: maxpool2d_layer
   use nf_reshape2d_layer, only: reshape2d_layer
@@ -81,7 +81,7 @@ module function conv2d(filters, kernel_width, kernel_height, activation) result(
 
   end function conv2d
 
-  module function locally_connected1d(filters, kernel_size, activation) result(res)
+  module function locally_connected2d(filters, kernel_size, activation) result(res)
     integer, intent(in) :: filters
     integer, intent(in) :: kernel_size
     class(activation_function), intent(in), optional :: activation
@@ -89,7 +89,7 @@ module function locally_connected1d(filters, kernel_size, activation) result(res
 
     class(activation_function), allocatable :: activation_tmp
 
-    res % name = 'locally_connected1d'
+    res % name = 'locally_connected2d'
 
     if (present(activation)) then
       allocate(activation_tmp, source=activation)
@@ -101,10 +101,10 @@ module function locally_connected1d(filters, kernel_size, activation) result(res
 
     allocate( &
       res % p, &
-      source=locally_connected1d_layer(filters, kernel_size, activation_tmp) &
+      source=locally_connected2d_layer(filters, kernel_size, activation_tmp) &
     )
 
-  end function locally_connected1d
+  end function locally_connected2d
 
 
   module function dense(layer_size, activation) result(res)
diff --git a/src/nf/nf_layer_submodule.f90 b/src/nf/nf_layer_submodule.f90
index 778d227a..5b74eb5d 100644
--- a/src/nf/nf_layer_submodule.f90
+++ b/src/nf/nf_layer_submodule.f90
@@ -9,7 +9,7 @@
   use nf_input1d_layer, only: input1d_layer
   use nf_input2d_layer, only: input2d_layer
   use nf_input3d_layer, only: input3d_layer
-  use nf_locally_connected1d_layer, only: locally_connected1d_layer
+  use nf_locally_connected2d_layer, only: locally_connected2d_layer
   use nf_maxpool1d_layer, only: maxpool1d_layer
   use nf_maxpool2d_layer, only: maxpool2d_layer
   use nf_reshape2d_layer, only: reshape2d_layer
@@ -52,11 +52,11 @@ pure module subroutine backward_1d(self, previous, gradient)
 
       type is(flatten_layer)
 
-        ! Upstream layers permitted: input2d, input3d, conv1d, conv2d, locally_connected1d, maxpool1d, maxpool2d
+        ! Upstream layers permitted: input2d, input3d, conv1d, conv2d, locally_connected2d, maxpool1d, maxpool2d
         select type(prev_layer => previous % p)
           type is(input2d_layer)
             call this_layer % backward(prev_layer % output, gradient)
-          type is(locally_connected1d_layer)
+          type is(locally_connected2d_layer)
             call this_layer % backward(prev_layer % output, gradient)
           type is(maxpool1d_layer)
             call this_layer % backward(prev_layer % output, gradient)
@@ -145,13 +145,13 @@ pure module subroutine backward_2d(self, previous, gradient)
            call this_layer % backward(prev_layer % output, gradient)
          type is(input2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
-         type is(locally_connected1d_layer)
+         type is(locally_connected2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
          type is(conv1d_layer)
            call this_layer % backward(prev_layer % output, gradient)
        end select
 
-      type is(locally_connected1d_layer)
+      type is(locally_connected2d_layer)
 
        select type(prev_layer => previous % p)
          type is(maxpool1d_layer)
@@ -160,7 +160,7 @@ pure module subroutine backward_2d(self, previous, gradient)
            call this_layer % backward(prev_layer % output, gradient)
          type is(input2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
-         type is(locally_connected1d_layer)
+         type is(locally_connected2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
          type is(conv1d_layer)
            call this_layer % backward(prev_layer % output, gradient)
@@ -173,7 +173,7 @@ pure module subroutine backward_2d(self, previous, gradient)
            call this_layer % backward(prev_layer % output, gradient)
          type is(reshape2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
-         type is(locally_connected1d_layer)
+         type is(locally_connected2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
          type is(input2d_layer)
            call this_layer % backward(prev_layer % output, gradient)
@@ -294,13 +294,13 @@ module subroutine forward(self, input)
             call this_layer % forward(prev_layer % output)
         end select
 
-      type is(locally_connected1d_layer)
+      type is(locally_connected2d_layer)
 
-        ! Upstream layers permitted: input2d, locally_connected1d, maxpool1d, reshape2d
+        ! Upstream layers permitted: input2d, locally_connected2d, maxpool1d, reshape2d
         select type(prev_layer => input % p)
           type is(input2d_layer)
             call this_layer % forward(prev_layer % output)
-          type is(locally_connected1d_layer)
+          type is(locally_connected2d_layer)
             call this_layer % forward(prev_layer % output)
           type is(maxpool1d_layer)
             call this_layer % forward(prev_layer % output)
@@ -312,11 +312,11 @@
 
       type is(conv1d_layer)
 
-        ! Upstream layers permitted: input2d, locally_connected1d, maxpool1d, reshape2d
+        ! Upstream layers permitted: input2d, locally_connected2d, maxpool1d, reshape2d
         select type(prev_layer => input % p)
           type is(input2d_layer)
             call this_layer % forward(prev_layer % output)
-          type is(locally_connected1d_layer)
+          type is(locally_connected2d_layer)
             call this_layer % forward(prev_layer % output)
           type is(maxpool1d_layer)
             call this_layer % forward(prev_layer % output)
@@ -328,11 +328,11 @@
 
       type is(maxpool1d_layer)
 
-        ! Upstream layers permitted: input1d, locally_connected1d, maxpool1d, reshape2d
+        ! Upstream layers permitted: input1d, locally_connected2d, maxpool1d, reshape2d
         select type(prev_layer => input % p)
          type is(input2d_layer)
            call this_layer % forward(prev_layer % output)
-         type is(locally_connected1d_layer)
+         type is(locally_connected2d_layer)
            call this_layer % forward(prev_layer % output)
          type is(maxpool1d_layer)
            call this_layer % forward(prev_layer % output)
@@ -368,7 +368,7 @@ module subroutine forward(self, input)
            call this_layer % forward(prev_layer % output)
          type is(conv2d_layer)
            call this_layer % forward(prev_layer % output)
-         type is(locally_connected1d_layer)
+         type is(locally_connected2d_layer)
            call this_layer % forward(prev_layer % output)
          type is(maxpool1d_layer)
            call this_layer % forward(prev_layer % output)
@@ -481,7 +481,7 @@ pure module subroutine get_output_2d(self, output)
         allocate(output, source=this_layer % output)
       type is(maxpool1d_layer)
         allocate(output, source=this_layer % output)
-      type is(locally_connected1d_layer)
+      type is(locally_connected2d_layer)
         allocate(output, source=this_layer % output)
       type is(conv1d_layer)
         allocate(output, source=this_layer % output)
@@ -497,7 +497,7 @@ pure module subroutine get_output_2d(self, output)
         allocate(output, source=this_layer % output)
       class default
         error stop '2-d output can only be read from a input2d, maxpool1d, ' &
-          // 'locally_connected1d, conv1d, reshape2d, embedding, linear2d, ' &
+          // 'locally_connected2d, conv1d, reshape2d, embedding, linear2d, ' &
          // 'self_attention, or layernorm layer.'
 
     end select
@@ -549,7 +549,7 @@ impure elemental module subroutine init(self, input)
         self % layer_shape = shape(this_layer % output)
       type is(dropout_layer)
         self % layer_shape = shape(this_layer % output)
-      type is(locally_connected1d_layer)
+      type is(locally_connected2d_layer)
         self % layer_shape = shape(this_layer % output)
       type is(maxpool1d_layer)
         self % layer_shape = shape(this_layer % output)
@@ -611,7 +611,7 @@ elemental module function get_num_params(self) result(num_params)
         num_params = this_layer % get_num_params()
       type is (conv2d_layer)
         num_params = this_layer % get_num_params()
-      type is (locally_connected1d_layer)
+      type is (locally_connected2d_layer)
         num_params = this_layer % get_num_params()
       type is (maxpool1d_layer)
         num_params = 0
@@ -656,7 +656,7 @@ module function get_params(self) result(params)
         params = this_layer % get_params()
       type is (conv2d_layer)
         params = this_layer % get_params()
-      type is (locally_connected1d_layer)
+      type is (locally_connected2d_layer)
         params = this_layer % get_params()
       type is (maxpool1d_layer)
         ! No parameters to get.
@@ -732,7 +732,7 @@ module subroutine set_params(self, params)
 
       type is (conv2d_layer)
         call this_layer % set_params(params)
-      type is (locally_connected1d_layer)
+      type is (locally_connected2d_layer)
         call this_layer % set_params(params)
 
       type is (maxpool1d_layer)
diff --git a/src/nf/nf_locally_connected1d_layer.f90 b/src/nf/nf_locally_connected2d_layer.f90
similarity index 68%
rename from src/nf/nf_locally_connected1d_layer.f90
rename to src/nf/nf_locally_connected2d_layer.f90
index 6fea2c5c..2478dc0a 100644
--- a/src/nf/nf_locally_connected1d_layer.f90
+++ b/src/nf/nf_locally_connected2d_layer.f90
@@ -1,14 +1,14 @@
-module nf_locally_connected1d_layer
-  !! This modules provides a 1-d convolutional `locally_connected1d` type.
+module nf_locally_connected2d_layer
+  !! This modules provides a 1-d convolutional `locally_connected2d` type.
 
   use nf_activation, only: activation_function
   use nf_base_layer, only: base_layer
   implicit none
 
   private
-  public :: locally_connected1d_layer
+  public :: locally_connected2d_layer
 
-  type, extends(base_layer) :: locally_connected1d_layer
+  type, extends(base_layer) :: locally_connected2d_layer
 
     integer :: width
     integer :: height
@@ -39,18 +39,18 @@ module nf_locally_connected1d_layer
     procedure :: init
     procedure :: set_params
 
-  end type locally_connected1d_layer
+  end type locally_connected2d_layer
 
-  interface locally_connected1d_layer
-    module function locally_connected1d_layer_cons(filters, kernel_size, activation) &
+  interface locally_connected2d_layer
+    module function locally_connected2d_layer_cons(filters, kernel_size, activation) &
       result(res)
-      !! `locally_connected1d_layer` constructor function
+      !! `locally_connected2d_layer` constructor function
       integer, intent(in) :: filters
       integer, intent(in) :: kernel_size
       class(activation_function), intent(in) :: activation
-      type(locally_connected1d_layer) :: res
-    end function locally_connected1d_layer_cons
-  end interface locally_connected1d_layer
+      type(locally_connected2d_layer) :: res
+    end function locally_connected2d_layer_cons
+  end interface locally_connected2d_layer
 
   interface
 
@@ -58,24 +58,24 @@ module subroutine init(self, input_shape)
       !! Initialize the layer data structures.
       !!
       !! This is a deferred procedure from the `base_layer` abstract type.
-      class(locally_connected1d_layer), intent(in out) :: self
-        !! A `locally_connected1d_layer` instance
+      class(locally_connected2d_layer), intent(in out) :: self
+        !! A `locally_connected2d_layer` instance
       integer, intent(in) :: input_shape(:)
         !! Input layer dimensions
     end subroutine init
 
     pure module subroutine forward(self, input)
-      !! Apply a forward pass on the `locally_connected1d` layer.
-      class(locally_connected1d_layer), intent(in out) :: self
-        !! A `locally_connected1d_layer` instance
+      !! Apply a forward pass on the `locally_connected2d` layer.
+      class(locally_connected2d_layer), intent(in out) :: self
+        !! A `locally_connected2d_layer` instance
       real, intent(in) :: input(:,:)
         !! Input data
     end subroutine forward
 
    pure module subroutine backward(self, input, gradient)
-      !! Apply a backward pass on the `locally_connected1d` layer.
-      class(locally_connected1d_layer), intent(in out) :: self
-        !! A `locally_connected1d_layer` instance
+      !! Apply a backward pass on the `locally_connected2d` layer.
+      class(locally_connected2d_layer), intent(in out) :: self
+        !! A `locally_connected2d_layer` instance
       real, intent(in) :: input(:,:)
         !! Input data (previous layer)
       real, intent(in) :: gradient(:,:)
@@ -84,8 +84,8 @@ end subroutine backward
 
     pure module function get_num_params(self) result(num_params)
       !! Get the number of parameters in the layer.
-      class(locally_connected1d_layer), intent(in) :: self
-        !! A `locally_connected1d_layer` instance
+      class(locally_connected2d_layer), intent(in) :: self
+        !! A `locally_connected2d_layer` instance
       integer :: num_params
         !! Number of parameters
     end function get_num_params
@@ -93,14 +93,14 @@
    module function get_params(self) result(params)
      !! Return the parameters (weights and biases) of this layer.
      !! The parameters are ordered as weights first, biases second.
-      class(locally_connected1d_layer), intent(in), target :: self
-        !! A `locally_connected1d_layer` instance
+      class(locally_connected2d_layer), intent(in), target :: self
+        !! A `locally_connected2d_layer` instance
      real, allocatable :: params(:)
        !! Parameters to get
    end function get_params
 
    module subroutine get_params_ptr(self, w_ptr, b_ptr)
-      class(locally_connected1d_layer), intent(in), target :: self
+      class(locally_connected2d_layer), intent(in), target :: self
      real, pointer, intent(out) :: w_ptr(:)
      real, pointer, intent(out) :: b_ptr(:)
    end subroutine get_params_ptr
@@ -108,26 +108,26 @@
    module function get_gradients(self) result(gradients)
      !! Return the gradients of this layer.
      !! The gradients are ordered as weights first, biases second.
-      class(locally_connected1d_layer), intent(in), target :: self
-        !! A `locally_connected1d_layer` instance
+      class(locally_connected2d_layer), intent(in), target :: self
+        !! A `locally_connected2d_layer` instance
      real, allocatable :: gradients(:)
        !! Gradients to get
    end function get_gradients
 
    module subroutine get_gradients_ptr(self, dw_ptr, db_ptr)
-      class(locally_connected1d_layer), intent(in), target :: self
+      class(locally_connected2d_layer), intent(in), target :: self
      real, pointer, intent(out) :: dw_ptr(:)
      real, pointer, intent(out) :: db_ptr(:)
    end subroutine get_gradients_ptr
 
    module subroutine set_params(self, params)
      !! Set the parameters of the layer.
-      class(locally_connected1d_layer), intent(in out) :: self
-        !! A `locally_connected1d_layer` instance
+      class(locally_connected2d_layer), intent(in out) :: self
+        !! A `locally_connected2d_layer` instance
      real, intent(in) :: params(:)
        !! Parameters to set
    end subroutine set_params
 
  end interface
 
-end module nf_locally_connected1d_layer
+end module nf_locally_connected2d_layer
diff --git a/src/nf/nf_locally_connected1d_layer_submodule.f90 b/src/nf/nf_locally_connected2d_layer_submodule.f90
similarity index 84%
rename from src/nf/nf_locally_connected1d_layer_submodule.f90
rename to src/nf/nf_locally_connected2d_layer_submodule.f90
index fa6110d5..5b2f5f85 100644
--- a/src/nf/nf_locally_connected1d_layer_submodule.f90
+++ b/src/nf/nf_locally_connected2d_layer_submodule.f90
@@ -1,4 +1,4 @@
-submodule(nf_locally_connected1d_layer) nf_locally_connected1d_layer_submodule
+submodule(nf_locally_connected2d_layer) nf_locally_connected2d_layer_submodule
 
   use nf_activation, only: activation_function
   use nf_random, only: random_normal
@@ -7,22 +7,22 @@
 contains
 
-  module function locally_connected1d_layer_cons(filters, kernel_size, activation) result(res)
+  module function locally_connected2d_layer_cons(filters, kernel_size, activation) result(res)
    implicit none
    integer, intent(in) :: filters
    integer, intent(in) :: kernel_size
    class(activation_function), intent(in) :: activation
-    type(locally_connected1d_layer) :: res
+    type(locally_connected2d_layer) :: res
 
    res % kernel_size = kernel_size
    res % filters = filters
    res % activation_name = activation % get_name()
    allocate(res % activation, source = activation)
-  end function locally_connected1d_layer_cons
+  end function locally_connected2d_layer_cons
 
 
  module subroutine init(self, input_shape)
    implicit none
-    class(locally_connected1d_layer), intent(in out) :: self
+    class(locally_connected2d_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)
 
    self % channels = input_shape(1)
@@ -53,7 +53,7 @@ end subroutine init
 
  pure module subroutine forward(self, input)
    implicit none
-    class(locally_connected1d_layer), intent(in out) :: self
+    class(locally_connected2d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:)
    integer :: input_channels, input_width
    integer :: j, n
@@ -74,7 +74,7 @@ end subroutine forward
 
  pure module subroutine backward(self, input, gradient)
    implicit none
-    class(locally_connected1d_layer), intent(in out) :: self
+    class(locally_connected2d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:)
    real, intent(in) :: gradient(:,:)
    integer :: input_channels, input_width, output_width
@@ -117,19 +117,19 @@ pure module subroutine backward(self, input, gradient)
  end subroutine backward
 
  pure module function get_num_params(self) result(num_params)
-    class(locally_connected1d_layer), intent(in) :: self
+    class(locally_connected2d_layer), intent(in) :: self
    integer :: num_params
    num_params = product(shape(self % kernel)) + product(shape(self % biases))
  end function get_num_params
 
  module function get_params(self) result(params)
-    class(locally_connected1d_layer), intent(in), target :: self
+    class(locally_connected2d_layer), intent(in), target :: self
    real, allocatable :: params(:)
    params = [self % kernel, self % biases]
  end function get_params
 
  module subroutine get_params_ptr(self, w_ptr, b_ptr)
-    class(locally_connected1d_layer), intent(in), target :: self
+    class(locally_connected2d_layer), intent(in), target :: self
    real, pointer, intent(out) :: w_ptr(:)
    real, pointer, intent(out) :: b_ptr(:)
    w_ptr(1:size(self % kernel)) => self % kernel
@@ -137,13 +137,13 @@ module subroutine get_params_ptr(self, w_ptr, b_ptr)
  end subroutine get_params_ptr
 
  module function get_gradients(self) result(gradients)
-    class(locally_connected1d_layer), intent(in), target :: self
+    class(locally_connected2d_layer), intent(in), target :: self
    real, allocatable :: gradients(:)
    gradients = [self % dw, self % db]
  end function get_gradients
 
  module subroutine get_gradients_ptr(self, dw_ptr, db_ptr)
-    class(locally_connected1d_layer), intent(in), target :: self
+    class(locally_connected2d_layer), intent(in), target :: self
    real, pointer, intent(out) :: dw_ptr(:)
    real, pointer, intent(out) :: db_ptr(:)
    dw_ptr(1:size(self % dw)) => self % dw
@@ -151,11 +151,11 @@ module subroutine get_gradients_ptr(self, dw_ptr, db_ptr)
  end subroutine get_gradients_ptr
 
  module subroutine set_params(self, params)
-    class(locally_connected1d_layer), intent(in out) :: self
+    class(locally_connected2d_layer), intent(in out) :: self
    real, intent(in) :: params(:)
 
    if (size(params) /= self % get_num_params()) then
-      error stop 'locally_connected1d_layer % set_params: Number of parameters does not match'
+      error stop 'locally_connected2d_layer % set_params: Number of parameters does not match'
    end if
 
    self % kernel = reshape(params(:product(shape(self % kernel))), shape(self % kernel))
@@ -165,4 +165,4 @@ module subroutine set_params(self, params)
 
  end subroutine set_params
 
-end submodule nf_locally_connected1d_layer_submodule
+end submodule nf_locally_connected2d_layer_submodule
diff --git a/src/nf/nf_network_submodule.f90 b/src/nf/nf_network_submodule.f90
index df4498be..df95963a 100644
--- a/src/nf/nf_network_submodule.f90
+++ b/src/nf/nf_network_submodule.f90
@@ -8,7 +8,7 @@
   use nf_input1d_layer, only: input1d_layer
   use nf_input2d_layer, only: input2d_layer
   use nf_input3d_layer, only: input3d_layer
-  use nf_locally_connected1d_layer, only: locally_connected1d_layer
+  use nf_locally_connected2d_layer, only: locally_connected2d_layer
   use nf_maxpool1d_layer, only: maxpool1d_layer
   use nf_maxpool2d_layer, only: maxpool2d_layer
   use nf_reshape2d_layer, only: reshape2d_layer
@@ -79,7 +79,7 @@ module function network_from_layers(layers) result(res)
         type is(conv2d_layer)
           res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
           n = n + 1
-        type is(locally_connected1d_layer)
+        type is(locally_connected2d_layer)
           res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
           n = n + 1
         type is(maxpool2d_layer)
@@ -185,7 +185,7 @@ module subroutine backward(self, output, loss)
           call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
         type is(conv1d_layer)
           call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
-        type is(locally_connected1d_layer)
+        type is(locally_connected2d_layer)
           call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
         type is(layernorm_layer)
           call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
@@ -663,7 +663,7 @@ module subroutine update(self, optimizer, batch_size)
         type is(conv1d_layer)
           call co_sum(this_layer % dw)
           call co_sum(this_layer % db)
-        type is(locally_connected1d_layer)
+        type is(locally_connected2d_layer)
           call co_sum(this_layer % dw)
           call co_sum(this_layer % db)
       end select
@@ -693,7 +693,7 @@ module subroutine update(self, optimizer, batch_size)
           call self % layers(n) % optimizer % minimize(biases, db / batch_size_)
           this_layer % dw = 0
           this_layer % db = 0
-        type is(locally_connected1d_layer)
+        type is(locally_connected2d_layer)
           call this_layer % get_params_ptr(weights, biases)
           call this_layer % get_gradients_ptr(dw, db)
           call self % layers(n) % optimizer % minimize(weights, dw / batch_size_)
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index ec4e139e..922a2936 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -8,7 +8,7 @@ foreach(execid
   dense_layer
   conv1d_layer
   conv2d_layer
-  locally_connected1d_layer
+  locally_connected2d_layer
   maxpool1d_layer
   maxpool2d_layer
   flatten_layer
diff --git a/test/test_locally_connected1d_layer.f90 b/test/test_locally_connected2d_layer.f90
similarity index 76%
rename from test/test_locally_connected1d_layer.f90
rename to test/test_locally_connected2d_layer.f90
index cde0a965..0157b916 100644
--- a/test/test_locally_connected1d_layer.f90
+++ b/test/test_locally_connected2d_layer.f90
@@ -1,4 +1,4 @@
-program test_locally_connected1d_layer
+program test_locally_connected2d_layer
 
   use iso_fortran_env, only: stderr => error_unit
   use nf, only: locally_connected, input, layer
@@ -14,19 +14,19 @@ program test_locally_connected1d_layer
 
   locally_connected_1d_layer = locally_connected(filters, kernel_size)
 
-  if (.not. locally_connected_1d_layer % name == 'locally_connected1d') then
+  if (.not. locally_connected_1d_layer % name == 'locally_connected2d') then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer has its name set correctly.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer has its name set correctly.. failed'
   end if
 
   if (locally_connected_1d_layer % initialized) then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer should not be marked as initialized yet.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer should not be marked as initialized yet.. failed'
   end if
 
   if (.not. locally_connected_1d_layer % activation == 'relu') then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer defaults to relu activation.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer defaults to relu activation.. failed'
   end if
 
   input_layer = input(3, 32)
@@ -34,17 +34,17 @@ program test_locally_connected1d_layer
 
   if (.not. locally_connected_1d_layer % initialized) then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer should now be marked as initialized.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer should now be marked as initialized.. failed'
   end if
 
   if (.not. all(locally_connected_1d_layer % input_layer_shape == [3, 32])) then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer input layer shape should be correct.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer input layer shape should be correct.. failed'
   end if
 
   if (.not. all(locally_connected_1d_layer % layer_shape == [filters, 30])) then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer input layer shape should be correct.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer input layer shape should be correct.. failed'
   end if
 
   ! Minimal locally_connected_1d layer: 1 channel, 3x3 pixel image;
@@ -64,14 +64,14 @@ program test_locally_connected1d_layer
 
   if (.not. all(abs(output) < tolerance)) then
     ok = .false.
-    write(stderr, '(a)') 'locally_connected1d layer with zero input and sigmoid function must forward to all 0.5.. failed'
+    write(stderr, '(a)') 'locally_connected2d layer with zero input and sigmoid function must forward to all 0.5.. failed'
   end if
 
   if (ok) then
-    print '(a)', 'test_locally_connected1d_layer: All tests passed.'
+    print '(a)', 'test_locally_connected2d_layer: All tests passed.'
   else
-    write(stderr, '(a)') 'test_locally_connected1d_layer: One or more tests failed.'
+    write(stderr, '(a)') 'test_locally_connected2d_layer: One or more tests failed.'
     stop 1
   end if
 
-end program test_locally_connected1d_layer
+end program test_locally_connected2d_layer
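For quick reference, a minimal usage sketch of the renamed layer follows. It is an illustration, not part of the patch, and assumes only what the diff itself shows: `nf` exports `locally_connected`, `input`, and `layer` (as in the test program above), the constructor takes `filters` and `kernel_size`, and a width-32 input with `kernel_size = 3` yields an output width of 30.

program locally_connected2d_usage
  ! Sketch mirroring test_locally_connected2d_layer.f90 above.
  use nf, only: locally_connected, input, layer
  implicit none

  type(layer) :: input_layer, lc_layer

  ! 3 channels, width 32, as in the test.
  input_layer = input(3, 32)

  ! The public constructor is still named locally_connected; after this
  ! change it resolves internally to the locally_connected2d layer type.
  lc_layer = locally_connected(filters=32, kernel_size=3)

  call lc_layer % init(input_layer)

  ! After init, lc_layer % name is 'locally_connected2d' and the output
  ! shape is [32, 30]: the width shrinks by kernel_size - 1.
  print '(a)', 'layer name: ' // lc_layer % name
  print '(a, 2(1x, i0))', 'output shape:', lc_layer % layer_shape
end program locally_connected2d_usage

In a full model the same constructor goes straight into the network layer list; as the `network_from_layers` hunk above shows, a `flatten()` layer is inserted automatically when a 1-d layer follows a `locally_connected2d_layer`.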