Skip to content

Commit 52f958f

Browse files
committed
Updated the comments
1 parent a08fba0 commit 52f958f

File tree

4 files changed

+15
-16
lines changed

4 files changed

+15
-16
lines changed

src/nf/nf_conv1d_layer.f90

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ module nf_conv1d_layer
1717
integer :: filters
1818

1919
real, allocatable :: biases(:) ! size(filters)
20-
real, allocatable :: kernel(:,:,:) ! filters x channels x window x window
21-
real, allocatable :: output(:,:) ! filters x output_width * output_height
20+
real, allocatable :: kernel(:,:,:) ! filters x channels x window
21+
real, allocatable :: output(:,:) ! filters x output_width
2222
real, allocatable :: z(:,:) ! kernel .dot. input + bias
2323

2424
real, allocatable :: dw(:,:,:) ! weight (kernel) gradients

src/nf/nf_layer_constructors.f90

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -154,22 +154,21 @@ module function flatten() result(res)
154154
end function flatten
155155

156156
module function conv1d(filters, kernel_size, activation) result(res)
157-
!! CHANGE THE COMMENTS
158-
!! 2-d convolutional layer constructor.
157+
!! 1-d convolutional layer constructor.
159158
!!
160-
!! This layer is for building 2-d convolutional network.
161-
!! Although the established convention is to call these layers 2-d,
162-
!! the shape of the data is actuall 3-d: image width, image height,
159+
!! This layer is for building 1-d convolutional network.
160+
!! Although the established convention is to call these layers 1-d,
161+
!! the shape of the data is actually 2-d: image width,
163162
!! and the number of channels.
164-
!! A conv2d layer must not be the first layer in the network.
163+
!! A conv1d layer must not be the first layer in the network.
165164
!!
166165
!! Example:
167166
!!
168167
!! ```
169-
!! use nf, only :: conv2d, layer
170-
!! type(layer) :: conv2d_layer
171-
!! conv2d_layer = dense(filters=32, kernel_size=3)
172-
!! conv2d_layer = dense(filters=32, kernel_size=3, activation='relu')
168+
!! use nf, only :: conv1d, layer
169+
!! type(layer) :: conv1d_layer
170+
!! conv1d_layer = conv1d(filters=32, kernel_size=3)
171+
!! conv1d_layer = conv1d(filters=32, kernel_size=3, activation='relu')
173172
!! ```
174173
integer, intent(in) :: filters
175174
!! Number of filters in the output of the layer

src/nf/nf_layer_submodule.f90

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ pure module subroutine backward_1d(self, previous, gradient)
2727
real, intent(in) :: gradient(:)
2828

2929
! Backward pass from a 1-d layer downstream currently implemented
30-
! only for dense and flatten layers
30+
! only for dense, dropout and flatten layers
3131
select type(this_layer => self % p)
3232

3333
type is(dense_layer)
@@ -50,7 +50,7 @@ pure module subroutine backward_1d(self, previous, gradient)
5050

5151
type is(flatten_layer)
5252

53-
! Upstream layers permitted: input2d, input3d, conv2d, locally_connected_1d, maxpool1d, maxpool2d
53+
! Upstream layers permitted: input2d, input3d, conv1d, conv2d, locally_connected_1d, maxpool1d, maxpool2d
5454
select type(prev_layer => previous % p)
5555
type is(input2d_layer)
5656
call this_layer % backward(prev_layer % output, gradient)

test/test_conv1d_network.f90

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ program test_conv1d_network
4141
real :: y(1)
4242
real :: tolerance = 1e-4
4343
integer :: n
44-
integer, parameter :: num_iterations = 1000
44+
integer, parameter :: num_iterations = 1500
4545

4646
! Test training of a minimal constant mapping
4747
allocate(sample_input(1, 5))
@@ -79,7 +79,7 @@ program test_conv1d_network
7979
real :: y(1)
8080
real :: tolerance = 1e-4
8181
integer :: n
82-
integer, parameter :: num_iterations = 1000
82+
integer, parameter :: num_iterations = 1500
8383

8484
call random_number(x)
8585
y = [0.1234567]

0 commit comments

Comments
 (0)