Skip to content

Commit 2c929d8

Browse files
committed
Creating maxpool1d
1 parent a660d21 commit 2c929d8

9 files changed

+244
-27
lines changed

CMakeLists.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,8 @@ add_library(neural-fortran
4242
src/nf/nf_locally_connected_1d_submodule.f90
4343
src/nf/nf_loss.f90
4444
src/nf/nf_loss_submodule.f90
45+
src/nf/nf_maxpool1d_layer.f90
46+
src/nf/nf_maxpool1d_layer_submodule.f90
4547
src/nf/nf_maxpool2d_layer.f90
4648
src/nf/nf_maxpool2d_layer_submodule.f90
4749
src/nf/nf_metrics.f90

example/cnn_mnist_1d.f90

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
program cnn_mnist
22

33
use nf, only: network, sgd, &
4-
input, conv2d, maxpool2d, flatten, dense, reshape, reshape_generalized, locally_connected_1d, &
4+
input, conv2d, maxpool1d, maxpool2d, flatten, dense, reshape, reshape_generalized, locally_connected_1d, &
55
load_mnist, label_digits, softmax, relu
66

77
implicit none
@@ -20,11 +20,11 @@ program cnn_mnist
2020

2121
net = network([ &
2222
input(784), &
23-
reshape([1,28,28]), &
23+
reshape_generalized([1,784]), &
2424
locally_connected_1d(filters=8, kernel_size=3, activation=relu()), &
25-
maxpool2d(pool_size=2), &
25+
maxpool1d(pool_size=2), &
2626
locally_connected_1d(filters=16, kernel_size=3, activation=relu()), &
27-
maxpool2d(pool_size=2), &
27+
maxpool1d(pool_size=2), &
2828
dense(10, activation=softmax()) &
2929
])
3030

src/nf.f90

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ module nf
33
use nf_datasets_mnist, only: label_digits, load_mnist
44
use nf_layer, only: layer
55
use nf_layer_constructors, only: &
6-
conv2d, dense, flatten, input, maxpool2d, reshape, reshape_generalized, locally_connected_1d
6+
conv2d, dense, flatten, input, maxpool1d, maxpool2d, reshape, reshape_generalized, locally_connected_1d
77
use nf_loss, only: mse, quadratic
88
use nf_metrics, only: corr, maxabs
99
use nf_network, only: network

src/nf/nf_layer_constructors.f90

Lines changed: 25 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ module nf_layer_constructors
88
implicit none
99

1010
private
11-
public :: conv2d, dense, flatten, input, locally_connected_1d, maxpool2d, reshape, reshape_generalized
11+
public :: conv2d, dense, flatten, input, locally_connected_1d, maxpool1d, maxpool2d, reshape, reshape_generalized
1212

1313
interface input
1414

@@ -152,6 +152,29 @@ module function conv2d(filters, kernel_size, activation) result(res)
152152
!! Resulting layer instance
153153
end function conv2d
154154

155+
module function maxpool1d(pool_size, stride) result(res)
156+
!! 1-d maxpooling layer constructor.
157+
!!
158+
!! This layer is for downscaling other layers, typically
159+
!! `locally_connected_1d`.
160+
!!
161+
!! Example:
162+
!!
163+
!! ```
164+
!! use nf, only: maxpool1d, layer
165+
!! type(layer) :: maxpool1d_layer
166+
!! maxpool1d_layer = maxpool1d(pool_size=2)
167+
!! maxpool1d_layer = maxpool1d(pool_size=2, stride=3)
168+
!! ```
169+
integer, intent(in) :: pool_size
170+
!! Width of the pooling window, commonly 2
171+
integer, intent(in), optional :: stride
172+
!! Stride of the pooling window, commonly equal to `pool_size`;
173+
!! Defaults to `pool_size` if omitted.
174+
type(layer) :: res
175+
!! Resulting layer instance
176+
end function maxpool1d
177+
155178
module function maxpool2d(pool_size, stride) result(res)
156179
!! 2-d maxpooling layer constructor.
157180
!!
@@ -175,6 +198,7 @@ module function maxpool2d(pool_size, stride) result(res)
175198
end function maxpool2d
176199

177200
module function locally_connected_1d(filters, kernel_size, activation) result(res)
201+
!! 1-d locally connected layer constructor.
178202
!!
179203
!! This layer is for building a 1-d locally connected network.
180204
!!

src/nf/nf_layer_constructors_submodule.f90

Lines changed: 29 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
use nf_input2d_layer, only: input2d_layer
99
use nf_input3d_layer, only: input3d_layer
1010
use nf_locally_connected_1d_layer, only: locally_connected_1d_layer
11+
use nf_maxpool1d_layer, only: maxpool1d_layer
1112
use nf_maxpool2d_layer, only: maxpool2d_layer
1213
use nf_reshape_layer, only: reshape3d_layer
1314
use nf_reshape_layer_generalized, only: reshape_generalized_layer
@@ -130,7 +131,34 @@ module function locally_connected_1d(filters, kernel_size, activation) result(re
130131
)
131132

132133
end function locally_connected_1d
133-
134+
135+
module function maxpool1d(pool_size, stride) result(res)
  !! 1-d maxpooling layer constructor.
  !!
  !! Validates the pooling hyperparameters, applies the default stride
  !! (equal to `pool_size`) when none is given, and wraps a
  !! `maxpool1d_layer` instance in a generic `layer` container.
  integer, intent(in) :: pool_size
    !! Width of the pooling window, commonly 2
  integer, intent(in), optional :: stride
    !! Stride of the pooling window; defaults to `pool_size` if omitted
  type(layer) :: res
    !! Resulting layer instance
  integer :: effective_stride

  if (pool_size < 2) &
    error stop 'pool_size must be >= 2 in a maxpool1d layer'

  ! The stride defaults to the pool size when the caller does not supply one.
  effective_stride = pool_size
  if (present(stride)) effective_stride = stride

  if (effective_stride < 1) &
    error stop 'stride must be >= 1 in a maxpool1d layer'

  res % name = 'maxpool1d'

  allocate(res % p, source=maxpool1d_layer(pool_size, effective_stride))

end function maxpool1d
134162
135163

136164
module function maxpool2d(pool_size, stride) result(res)

src/nf/nf_layer_submodule.f90

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -45,8 +45,6 @@ pure module subroutine backward_1d(self, previous, gradient)
4545
call this_layer % backward(prev_layer % output, gradient)
4646
type is(conv2d_layer)
4747
call this_layer % backward(prev_layer % output, gradient)
48-
type is(locally_connected_1d_layer)
49-
call this_layer % backward(prev_layer % output, gradient)
5048
type is(maxpool2d_layer)
5149
call this_layer % backward(prev_layer % output, gradient)
5250
end select
@@ -55,7 +53,6 @@ pure module subroutine backward_1d(self, previous, gradient)
5553

5654
end subroutine backward_1d
5755

58-
5956
pure module subroutine backward_2d(self, previous, gradient)
6057
implicit none
6158
class(layer), intent(in out) :: self
@@ -88,8 +85,6 @@ pure module subroutine backward_3d(self, previous, gradient)
8885
call this_layer % backward(prev_layer % output, gradient)
8986
type is(conv2d_layer)
9087
call this_layer % backward(prev_layer % output, gradient)
91-
type is(locally_connected_1d_layer)
92-
call this_layer % backward(prev_layer % output, gradient)
9388
type is(reshape3d_layer)
9489
call this_layer % backward(prev_layer % output, gradient)
9590
end select
@@ -104,8 +99,6 @@ pure module subroutine backward_3d(self, previous, gradient)
10499
call this_layer % backward(prev_layer % output, gradient)
105100
type is(input3d_layer)
106101
call this_layer % backward(prev_layer % output, gradient)
107-
type is(locally_connected_1d_layer)
108-
call this_layer % backward(prev_layer % output, gradient)
109102
type is(reshape3d_layer)
110103
call this_layer % backward(prev_layer % output, gradient)
111104
end select
@@ -154,8 +147,6 @@ pure module subroutine forward(self, input)
154147
call this_layer % forward(prev_layer % output)
155148
type is(conv2d_layer)
156149
call this_layer % forward(prev_layer % output)
157-
type is(locally_connected_1d_layer)
158-
call this_layer % forward(prev_layer % output)
159150
type is(maxpool2d_layer)
160151
call this_layer % forward(prev_layer % output)
161152
type is(reshape3d_layer)
@@ -170,8 +161,6 @@ pure module subroutine forward(self, input)
170161
call this_layer % forward(prev_layer % output)
171162
type is(conv2d_layer)
172163
call this_layer % forward(prev_layer % output)
173-
type is(locally_connected_1d_layer)
174-
call this_layer % forward(prev_layer % output)
175164
type is(maxpool2d_layer)
176165
call this_layer % forward(prev_layer % output)
177166
type is(reshape3d_layer)
@@ -186,8 +175,6 @@ pure module subroutine forward(self, input)
186175
call this_layer % forward(prev_layer % output)
187176
type is(conv2d_layer)
188177
call this_layer % forward(prev_layer % output)
189-
type is(locally_connected_1d_layer)
190-
call this_layer % forward(prev_layer % output)
191178
type is(maxpool2d_layer)
192179
call this_layer % forward(prev_layer % output)
193180
type is(reshape3d_layer)
@@ -231,6 +218,21 @@ pure module subroutine get_output_1d(self, output)
231218

232219
end subroutine get_output_1d
233220

221+
! NOTE(review): this subroutine duplicates the `get_output_2d` defined
! immediately below in this submodule; two specific procedures with the
! same name in one submodule will not compile. Remove one before building.
pure module subroutine get_output_2d(self, output)
222+
implicit none
223+
class(layer), intent(in) :: self
224+
real, allocatable, intent(out) :: output(:,:)
225+
226+
! Dispatch on the concrete layer type stored in the polymorphic `p`.
select type(this_layer => self % p)
227+
228+
type is(locally_connected_1d_layer)
229+
allocate(output, source=this_layer % output)
230+
class default
231+
error stop '2d output can only be read from a locally_connected_1d layer'
232+
233+
end select
234+
235+
end subroutine get_output_2d
234236

235237
pure module subroutine get_output_2d(self, output)
236238
implicit none
@@ -260,8 +262,6 @@ pure module subroutine get_output_3d(self, output)
260262
allocate(output, source=this_layer % output)
261263
type is(conv2d_layer)
262264
allocate(output, source=this_layer % output)
263-
type is(locally_connected_1d_layer)
264-
allocate(output, source=this_layer % output)
265265
type is(maxpool2d_layer)
266266
allocate(output, source=this_layer % output)
267267
type is(reshape3d_layer)

src/nf/nf_maxpool1d_layer.f90

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
module nf_maxpool1d_layer

  !! This module provides the 1-d maxpooling layer.

  use nf_base_layer, only: base_layer
  implicit none

  private
  public :: maxpool1d_layer

  type, extends(base_layer) :: maxpool1d_layer

    integer :: channels
    integer :: width ! Length of the input along the pooling dimension
    integer :: pool_size
    integer :: stride

    ! Index (into the input) of the maximum value of each pooling region,
    ! stored per channel; dimensions are (channels, pooled width).
    integer, allocatable :: maxloc(:,:)

    ! Gradient with respect to the input (same shape as the input).
    real, allocatable :: gradient(:,:)
    ! Pooled output; dimensions are (channels, pooled width).
    real, allocatable :: output(:,:)

  contains

    procedure :: init
    procedure :: forward
    procedure :: backward

  end type maxpool1d_layer

  interface maxpool1d_layer

    pure module function maxpool1d_layer_cons(pool_size, stride) result(res)
      !! `maxpool1d` constructor function.
      integer, intent(in) :: pool_size
        !! Width of the pooling window.
      integer, intent(in) :: stride
        !! Stride of the pooling window.
      type(maxpool1d_layer) :: res
    end function maxpool1d_layer_cons

  end interface maxpool1d_layer

  interface

    module subroutine init(self, input_shape)
      !! Initialize the `maxpool1d` layer instance with an input shape.
      class(maxpool1d_layer), intent(in out) :: self
        !! `maxpool1d_layer` instance.
      integer, intent(in) :: input_shape(:)
        !! Array shape of the input layer, expected as (channels, width).
    end subroutine init

    pure module subroutine forward(self, input)
      !! Run a forward pass of the `maxpool1d` layer.
      class(maxpool1d_layer), intent(in out) :: self
        !! `maxpool1d_layer` instance.
      real, intent(in) :: input(:,:)
        !! Input data (output of the previous layer), with shape (channels, width).
    end subroutine forward

    pure module subroutine backward(self, input, gradient)
      !! Run a backward pass of the `maxpool1d` layer.
      class(maxpool1d_layer), intent(in out) :: self
        !! `maxpool1d_layer` instance.
      real, intent(in) :: input(:,:)
        !! Input data (output of the previous layer).
      real, intent(in) :: gradient(:,:)
        !! Gradient from the downstream layer, with shape (channels, pooled width).
    end subroutine backward

  end interface

end module nf_maxpool1d_layer
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
submodule(nf_maxpool1d_layer) nf_maxpool1d_layer_submodule

  implicit none

contains

  pure module function maxpool1d_layer_cons(pool_size, stride) result(res)
    !! Construct a `maxpool1d_layer`, storing its pooling hyperparameters.
    implicit none
    integer, intent(in) :: pool_size
    integer, intent(in) :: stride
    type(maxpool1d_layer) :: res

    res % pool_size = pool_size
    res % stride = stride
  end function maxpool1d_layer_cons


  module subroutine init(self, input_shape)
    !! Allocate the layer state for an input of shape (channels, width).
    implicit none
    class(maxpool1d_layer), intent(in out) :: self
    integer, intent(in) :: input_shape(:)
    ! input_shape is expected to be (channels, width)

    self % channels = input_shape(1)
    ! The pooled width is the integer division of the input width by the
    ! stride; any trailing remainder narrower than the stride is dropped
    ! (consistent with the loop extent used in `forward`).
    self % width = input_shape(2) / self % stride

    ! Index of the maximum element within each pooling region.
    allocate(self % maxloc(self % channels, self % width))
    self % maxloc = 0

    ! Gradient array matching the input dimensions.
    allocate(self % gradient(input_shape(1), input_shape(2)))
    self % gradient = 0

    ! Output array (after pooling).
    allocate(self % output(self % channels, self % width))
    self % output = 0
  end subroutine init


  pure module subroutine forward(self, input)
    !! Forward pass: take the maximum over each pooling window and record
    !! where it came from, for use in the backward pass.
    implicit none
    class(maxpool1d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:)
    integer :: input_width
    integer :: i, n
    integer :: ii, iend
    integer :: iextent
    integer :: max_index ! Index in the input of the window maximum
    integer :: maxloc_temp(1) ! Holds the rank-1 result of the maxloc intrinsic

    input_width = size(input, dim=2)
    ! Process only complete stride steps so the window start index never
    ! exceeds the pooled extent implied by `init`.
    iextent = input_width - mod(input_width, self % stride)

    ! Loop over pooling windows (a stride apart) and over all channels.
    do concurrent (i = 1:iextent: self % stride, n = 1:self % channels)
      ! Index of this window in the pooled (output) array.
      ii = (i - 1) / self % stride + 1
      ! Last input index of this pooling window, clamped to the input width.
      iend = min(i + self % pool_size - 1, input_width)

      ! Local position of the maximum within the window ...
      maxloc_temp = maxloc(input(n, i:iend))
      ! ... converted to an index into the original input.
      max_index = maxloc_temp(1) + i - 1

      ! Remember where the maximum came from, then emit it as the output.
      self % maxloc(n, ii) = max_index
      self % output(n, ii) = input(n, max_index)
    end do
  end subroutine forward


  pure module subroutine backward(self, input, gradient)
    !! Backward pass: route each downstream gradient value to the input
    !! position that produced the corresponding maximum in `forward`.
    implicit none
    class(maxpool1d_layer), intent(in out) :: self
    real, intent(in) :: input(:,:)
    real, intent(in) :: gradient(:,:)
    integer :: channels, pooled_width
    integer :: i, n

    channels = size(gradient, dim=1)
    pooled_width = size(gradient, dim=2)

    ! Reset the stored gradient first; otherwise values scattered during a
    ! previous backward pass would persist at positions that are not maxima
    ! in this pass.
    self % gradient = 0

    ! The gradient for max-pooling is nonzero only at the input locations
    ! that were the maxima during the forward pass.
    do concurrent (n = 1:channels, i = 1:pooled_width)
      self % gradient(n, self % maxloc(n, i)) = gradient(n, i)
    end do
  end subroutine backward

end submodule nf_maxpool1d_layer_submodule

0 commit comments

Comments
 (0)