Skip to content

Commit a08fba0

Browse files
committed
Bug fixes; now everything works
1 parent 5d62b13 commit a08fba0

File tree

6 files changed: +25 −25 lines changed

example/cnn_mnist_1d.f90

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
program cnn_mnist_1d
22

33
use nf, only: network, sgd, &
4-
input, conv2d, maxpool1d, maxpool2d, flatten, dense, reshape, reshape2d, locally_connected_1d, &
4+
input, conv1d, conv2d, maxpool1d, maxpool2d, flatten, dense, reshape, reshape2d, locally_connected_1d, &
55
load_mnist, label_digits, softmax, relu
66

77
implicit none
@@ -12,7 +12,7 @@ program cnn_mnist_1d
1212
real, allocatable :: validation_images(:,:), validation_labels(:)
1313
real, allocatable :: testing_images(:,:), testing_labels(:)
1414
integer :: n
15-
integer, parameter :: num_epochs = 10
15+
integer, parameter :: num_epochs = 25
1616

1717
call load_mnist(training_images, training_labels, &
1818
validation_images, validation_labels, &
@@ -21,9 +21,9 @@ program cnn_mnist_1d
2121
net = network([ &
2222
input(784), &
2323
reshape2d([28,28]), &
24-
locally_connected_1d(filters=8, kernel_size=3, activation=relu()), &
24+
conv1d(filters=8, kernel_size=3, activation=relu()), &
2525
maxpool1d(pool_size=2), &
26-
locally_connected_1d(filters=16, kernel_size=3, activation=relu()), &
26+
conv1d(filters=16, kernel_size=3, activation=relu()), &
2727
maxpool1d(pool_size=2), &
2828
dense(10, activation=softmax()) &
2929
])
@@ -37,7 +37,7 @@ program cnn_mnist_1d
3737
label_digits(training_labels), &
3838
batch_size=16, &
3939
epochs=1, &
40-
optimizer=sgd(learning_rate=0.003) &
40+
optimizer=sgd(learning_rate=0.005) &
4141
)
4242

4343
print '(a,i2,a,f5.2,a)', 'Epoch ', n, ' done, Accuracy: ', accuracy( &

src/nf/nf_layer_submodule.f90

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -121,6 +121,8 @@ pure module subroutine backward_2d(self, previous, gradient)
121121
call this_layer % backward(prev_layer % output, gradient)
122122
type is(reshape2d_layer)
123123
call this_layer % backward(prev_layer % output, gradient)
124+
type is(input2d_layer)
125+
call this_layer % backward(prev_layer % output, gradient)
124126
type is(locally_connected_1d_layer)
125127
call this_layer % backward(prev_layer % output, gradient)
126128
type is(conv1d_layer)
@@ -134,6 +136,8 @@ pure module subroutine backward_2d(self, previous, gradient)
134136
call this_layer % backward(prev_layer % output, gradient)
135137
type is(reshape2d_layer)
136138
call this_layer % backward(prev_layer % output, gradient)
139+
type is(input2d_layer)
140+
call this_layer % backward(prev_layer % output, gradient)
137141
type is(locally_connected_1d_layer)
138142
call this_layer % backward(prev_layer % output, gradient)
139143
type is(conv1d_layer)
@@ -149,6 +153,8 @@ pure module subroutine backward_2d(self, previous, gradient)
149153
call this_layer % backward(prev_layer % output, gradient)
150154
type is(locally_connected_1d_layer)
151155
call this_layer % backward(prev_layer % output, gradient)
156+
type is(input2d_layer)
157+
call this_layer % backward(prev_layer % output, gradient)
152158
type is(conv1d_layer)
153159
call this_layer % backward(prev_layer % output, gradient)
154160
end select

src/nf/nf_network_submodule.f90

Lines changed: 11 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
submodule(nf_network) nf_network_submodule
22

3+
use nf_conv1d_layer, only: conv1d_layer
34
use nf_conv2d_layer, only: conv2d_layer
45
use nf_dense_layer, only: dense_layer
56
use nf_dropout_layer, only: dropout_layer
@@ -76,9 +77,9 @@ module function network_from_layers(layers) result(res)
7677
type is(conv2d_layer)
7778
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
7879
n = n + 1
79-
!type is(locally_connected_1d_layer)
80-
!res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
81-
!n = n + 1
80+
type is(locally_connected_1d_layer)
81+
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
82+
n = n + 1
8283
type is(maxpool2d_layer)
8384
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
8485
n = n + 1
@@ -88,6 +89,9 @@ module function network_from_layers(layers) result(res)
8889
type is(maxpool1d_layer)
8990
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
9091
n = n + 1
92+
type is(conv1d_layer)
93+
res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
94+
n = n + 1
9195
!type is(reshape2d_layer)
9296
! res % layers = [res % layers(:n-1), flatten(), res % layers(n:)]
9397
! n = n + 1
@@ -179,6 +183,10 @@ module subroutine backward(self, output, loss)
179183
call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
180184
type is(reshape2d_layer)
181185
call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
186+
type is(conv1d_layer)
187+
call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
188+
type is(locally_connected_1d_layer)
189+
call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
182190
end select
183191
end if
184192

test/test_conv1d_layer.f90

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -51,10 +51,6 @@ program test_conv1d_layer
5151
allocate(sample_input(1, 3))
5252
sample_input = 0
5353

54-
! Print the sample input array
55-
print *, "Sample Input:"
56-
print *, sample_input
57-
5854
input_layer = input(1, 3)
5955
conv1d_layer = conv1d(filters, kernel_size)
6056
call conv1d_layer % init(input_layer)
@@ -66,10 +62,6 @@ program test_conv1d_layer
6662
call conv1d_layer % forward(input_layer)
6763
call conv1d_layer % get_output(output)
6864

69-
! Print the output array after the forward pass
70-
print *, "Output:"
71-
print *, output
72-
7365
if (.not. all(abs(output) < tolerance)) then
7466
ok = .false.
7567
write(stderr, '(a)') 'conv1d layer with zero input and sigmoid function must forward to all 0.5.. failed'

test/test_conv1d_network.f90

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ program test_conv1d_network
99
real, allocatable :: sample_input(:,:), output(:,:)
1010
logical :: ok = .true.
1111

12-
! 2-layer convolutional network
12+
! 3-layer convolutional network
1313
net = network([ &
1414
input(3, 32), &
1515
conv1d(filters=16, kernel_size=3), &
@@ -26,7 +26,7 @@ program test_conv1d_network
2626
sample_input = 0
2727

2828
call net % forward(sample_input)
29-
call net % layers(2) % get_output(output)
29+
call net % layers(3) % get_output(output)
3030

3131
if (.not. all(shape(output) == [32, 28])) then
3232
write(stderr, '(a)') 'conv1d network output should have correct shape.. failed'
@@ -64,6 +64,7 @@ program test_conv1d_network
6464
end do
6565

6666
if (.not. n <= num_iterations) then
67+
6768
write(stderr, '(a)') &
6869
'convolutional network 1 should converge in simple training.. failed'
6970
ok = .false.

test/test_locally_connected_1d_layer.f90

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -51,10 +51,6 @@ program test_locally_connected_1d_layer
5151
allocate(sample_input(1, 3))
5252
sample_input = 0
5353

54-
! Print the sample input array
55-
print *, "Sample Input:"
56-
print *, sample_input
57-
5854
input_layer = input(1, 3)
5955
locally_connected_1d_layer = locally_connected_1d(filters, kernel_size)
6056
call locally_connected_1d_layer % init(input_layer)
@@ -66,9 +62,6 @@ program test_locally_connected_1d_layer
6662
call locally_connected_1d_layer % forward(input_layer)
6763
call locally_connected_1d_layer % get_output(output)
6864

69-
! Print the output array after the forward pass
70-
print *, "Output:"
71-
print *, output
7265

7366
if (.not. all(abs(output) < tolerance)) then
7467
ok = .false.

0 commit comments

Comments (0)