Commit 9875f5d

Author: Vandenplas, Jeremie

Removed unused variables

1 parent 00acae2

5 files changed: +4 -40 lines changed
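The changes fall into two groups: per-procedure implicit none statements, which are redundant because a submodule inherits explicit typing from its parent module by host association, and local variables (input_channels, input_width, output_width) that were assigned from size(input, ...) but never read afterwards. Below is a minimal sketch, not taken from this repository, of why the per-procedure statements can be dropped:

module demo_m
  ! A single IMPLICIT NONE at module scope covers the module itself and,
  ! through host association, every procedure implemented in its submodules.
  implicit none
  interface
    module subroutine hello()
    end subroutine hello
  end interface
end module demo_m

submodule (demo_m) demo_s
contains
  module subroutine hello()
    ! No per-procedure IMPLICIT NONE needed: explicit typing is already in
    ! force here, so any undeclared variable is a compile-time error.
    integer :: i
    do i = 1, 3
      print *, 'iteration', i
    end do
  end subroutine hello
end submodule demo_s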

src/nf/nf_conv1d_layer_submodule.f90

Lines changed: 1 addition & 14 deletions
@@ -20,7 +20,6 @@ module function conv1d_layer_cons(filters, kernel_size, activation) result(res)
   end function conv1d_layer_cons
 
   module subroutine init(self, input_shape)
-    implicit none
     class(conv1d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)
 
@@ -54,16 +53,11 @@ module subroutine init(self, input_shape)
   end subroutine init
 
   pure module subroutine forward(self, input)
-    implicit none
     class(conv1d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
-    integer :: input_channels, input_width
     integer :: j, n
     integer :: iws, iwe
 
-    input_channels = size(input, dim=1)
-    input_width = size(input, dim=2)
-
     ! Loop over output positions.
     do j = 1, self % width
       ! Compute the input window corresponding to output index j.
@@ -85,14 +79,12 @@ pure module subroutine forward(self, input)
   end subroutine forward
 
   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(conv1d_layer), intent(in out) :: self
     ! 'input' has shape: (channels, input_width)
     ! 'gradient' (dL/dy) has shape: (filters, output_width)
     real, intent(in) :: input(:,:)
     real, intent(in) :: gradient(:,:)
 
-    integer :: input_channels, input_width, output_width
     integer :: j, n, k
     integer :: iws, iwe
 
@@ -101,11 +93,6 @@ pure module subroutine backward(self, input, gradient)
     real :: db_local(self % filters)
     real :: dw_local(self % filters, self % channels, self % kernel_size)
 
-    ! Determine dimensions.
-    input_channels = size(input, dim=1)
-    input_width = size(input, dim=2)
-    output_width = self % width ! Note: output_width = input_width - kernel_size + 1
-
     !--- Compute the local gradient gdz = (dL/dy) * sigma'(z) for each output.
     gdz = gradient * self % activation % eval_prime(self % z)
 
@@ -120,7 +107,7 @@ pure module subroutine backward(self, input, gradient)
     ! In the forward pass the window for output index j was:
     !   iws = j, iwe = j + kernel_size - 1.
     do n = 1, self % filters
-      do j = 1, output_width
+      do j = 1, self % width
         iws = j
         iwe = j + self % kernel_size - 1
         do k = 1, self % channels
src/nf/nf_conv2d_layer_submodule.f90

Lines changed: 0 additions & 2 deletions
@@ -62,7 +62,6 @@ end subroutine init
 
 
   pure module subroutine forward(self, input)
-    implicit none
     class(conv2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     integer :: input_width, input_height, input_channels
@@ -113,7 +112,6 @@ end subroutine forward
 
 
   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(conv2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     real, intent(in) :: gradient(:,:,:)

src/nf/nf_locally_connected2d_layer_submodule.f90

Lines changed: 3 additions & 16 deletions
@@ -8,7 +8,6 @@
 contains
 
   module function locally_connected2d_layer_cons(filters, kernel_size, activation) result(res)
-    implicit none
     integer, intent(in) :: filters
     integer, intent(in) :: kernel_size
     class(activation_function), intent(in) :: activation
@@ -21,7 +20,6 @@ module function locally_connected2d_layer_cons(filters, kernel_size, activation)
   end function locally_connected2d_layer_cons
 
   module subroutine init(self, input_shape)
-    implicit none
     class(locally_connected2d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)
 
@@ -52,16 +50,11 @@ module subroutine init(self, input_shape)
   end subroutine init
 
   pure module subroutine forward(self, input)
-    implicit none
     class(locally_connected2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
-    integer :: input_channels, input_width
     integer :: j, n
     integer :: iws, iwe
 
-    input_channels = size(input, dim=1)
-    input_width = size(input, dim=2)
-
     do j = 1, self % width
       iws = j
       iwe = j + self % kernel_size - 1
@@ -73,27 +66,21 @@ pure module subroutine forward(self, input)
   end subroutine forward
 
   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(locally_connected2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
     real, intent(in) :: gradient(:,:)
-    integer :: input_channels, input_width, output_width
     integer :: j, n, k
     integer :: iws, iwe
     real :: gdz(self % filters, self % width)
     real :: db_local(self % filters, self % width)
     real :: dw_local(self % filters, self % width, self % channels, self % kernel_size)
 
-    input_channels = size(input, dim=1)
-    input_width = size(input, dim=2)
-    output_width = self % width
-
-    do j = 1, output_width
+    do j = 1, self % width
       gdz(:, j) = gradient(:, j) * self % activation % eval_prime(self % z(:, j))
     end do
 
     do n = 1, self % filters
-      do j = 1, output_width
+      do j = 1, self % width
         db_local(n, j) = gdz(n, j)
       end do
     end do
@@ -102,7 +89,7 @@ pure module subroutine backward(self, input, gradient)
     self % gradient = 0.0
 
     do n = 1, self % filters
-      do j = 1, output_width
+      do j = 1, self % width
         iws = j
         iwe = j + self % kernel_size - 1
         do k = 1, self % channels
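Unused locals like these are easy to catch mechanically; gfortran, for example, reports them under -Wall (specifically -Wunused-variable). A possible check, assuming the .mod files for the parent modules have already been built:

gfortran -c -Wall src/nf/nf_locally_connected2d_layer_submodule.f90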

src/nf/nf_maxpool1d_layer_submodule.f90

Lines changed: 0 additions & 4 deletions
@@ -5,7 +5,6 @@
 contains
 
   pure module function maxpool1d_layer_cons(pool_size, stride) result(res)
-    implicit none
     integer, intent(in) :: pool_size
     integer, intent(in) :: stride
     type(maxpool1d_layer) :: res
@@ -15,7 +14,6 @@ end function maxpool1d_layer_cons
 
 
   module subroutine init(self, input_shape)
-    implicit none
     class(maxpool1d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)
 
@@ -34,7 +32,6 @@ module subroutine init(self, input_shape)
   end subroutine init
 
   pure module subroutine forward(self, input)
-    implicit none
     class(maxpool1d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
     integer :: input_width
@@ -70,7 +67,6 @@ pure module subroutine forward(self, input)
   end subroutine forward
 
   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(maxpool1d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:)
     real, intent(in) :: gradient(:,:)

src/nf/nf_maxpool2d_layer_submodule.f90

Lines changed: 0 additions & 4 deletions
@@ -5,7 +5,6 @@
 contains
 
   pure module function maxpool2d_layer_cons(pool_size, stride) result(res)
-    implicit none
     integer, intent(in) :: pool_size
     integer, intent(in) :: stride
     type(maxpool2d_layer) :: res
@@ -15,7 +14,6 @@ end function maxpool2d_layer_cons
 
 
   module subroutine init(self, input_shape)
-    implicit none
     class(maxpool2d_layer), intent(in out) :: self
     integer, intent(in) :: input_shape(:)
 
@@ -39,7 +37,6 @@ end subroutine init
 
 
   pure module subroutine forward(self, input)
-    implicit none
     class(maxpool2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     integer :: input_width, input_height
@@ -86,7 +83,6 @@ end subroutine forward
 
 
   pure module subroutine backward(self, input, gradient)
-    implicit none
     class(maxpool2d_layer), intent(in out) :: self
     real, intent(in) :: input(:,:,:)
     real, intent(in) :: gradient(:,:,:)
