
Commit 6ae7142

Merge remote-tracking branch 'upstream/main' into linear2d_layer
2 parents 8cf5cb5 + a28a9be

21 files changed: 321 additions, 37 deletions

CMakeLists.txt

Lines changed: 2 additions & 0 deletions
@@ -30,6 +30,8 @@ add_library(neural-fortran
   src/nf/nf_flatten_layer_submodule.f90
   src/nf/nf_input1d_layer.f90
   src/nf/nf_input1d_layer_submodule.f90
+  src/nf/nf_input2d_layer.f90
+  src/nf/nf_input2d_layer_submodule.f90
   src/nf/nf_input3d_layer.f90
   src/nf/nf_input3d_layer_submodule.f90
   src/nf/nf_layer_constructors.f90

LICENSE

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2018-2024 neural-fortran contributors
+Copyright (c) 2018-2025 neural-fortran contributors

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

README.md

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ Read the paper [here](https://arxiv.org/abs/1902.06714).

 | Layer type | Constructor name | Supported input layers | Rank of output array | Forward pass | Backward pass |
 |------------|------------------|------------------------|----------------------|--------------|---------------|
-| Input | `input` | n/a | 1, 3 | n/a | n/a |
+| Input | `input` | n/a | 1, 2, 3 | n/a | n/a |
 | Dense (fully-connected) | `dense` | `input1d`, `flatten` | 1 | ✅ | ✅ |
 | Convolutional (2-d) | `conv2d` | `input3d`, `conv2d`, `maxpool2d`, `reshape` | 3 | ✅ | ✅(*) |
 | Max-pooling (2-d) | `maxpool2d` | `input3d`, `conv2d`, `maxpool2d`, `reshape` | 3 | ✅ | ✅ |

fpm.toml

Lines changed: 3 additions & 3 deletions
@@ -1,6 +1,6 @@
 name = "neural-fortran"
-version = "0.18.0"
+version = "0.19.0"
 license = "MIT"
 author = "Milan Curcic"
-maintainer = "[email protected]"
-copyright = "Copyright 2018-2024, neural-fortran contributors"
+maintainer = "[email protected]"
+copyright = "Copyright 2018-2025, neural-fortran contributors"

src/nf/nf_input2d_layer.f90

Lines changed: 48 additions & 0 deletions
@@ -0,0 +1,48 @@
+module nf_input2d_layer
+
+  !! This module provides the `input2d_layer` type.
+
+  use nf_base_layer, only: base_layer
+  implicit none
+
+  private
+  public :: input2d_layer
+
+  type, extends(base_layer) :: input2d_layer
+    real, allocatable :: output(:,:)
+  contains
+    procedure :: init
+    procedure :: set
+  end type input2d_layer
+
+  interface input2d_layer
+    pure module function input2d_layer_cons(output_shape) result(res)
+      !! Create a new instance of the 2-d input layer.
+      !! Only used internally by the `layer % init` method.
+      integer, intent(in) :: output_shape(2)
+        !! Shape of the input layer
+      type(input2d_layer) :: res
+        !! 2-d input layer instance
+    end function input2d_layer_cons
+  end interface input2d_layer
+
+  interface
+
+    module subroutine init(self, input_shape)
+      !! Only here to satisfy the language rules
+      !! about deferred methods of abstract types.
+      !! This method does nothing for this type and should not be called.
+      class(input2d_layer), intent(in out) :: self
+      integer, intent(in) :: input_shape(:)
+    end subroutine init
+
+    pure module subroutine set(self, values)
+      class(input2d_layer), intent(in out) :: self
+        !! Layer instance
+      real, intent(in) :: values(:,:)
+        !! Values to set
+    end subroutine set
+
+  end interface
+
+end module nf_input2d_layer

src/nf/nf_input2d_layer_submodule.f90

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+submodule(nf_input2d_layer) nf_input2d_layer_submodule
+  implicit none
+contains
+
+  pure module function input2d_layer_cons(output_shape) result(res)
+    integer, intent(in) :: output_shape(2)
+    type(input2d_layer) :: res
+    allocate(res % output(output_shape(1), output_shape(2)))
+    res % output = 0
+  end function input2d_layer_cons
+
+  module subroutine init(self, input_shape)
+    class(input2d_layer), intent(in out) :: self
+    integer, intent(in) :: input_shape(:)
+  end subroutine init
+
+  pure module subroutine set(self, values)
+    class(input2d_layer), intent(in out) :: self
+    real, intent(in) :: values(:,:)
+    self % output = values
+  end subroutine set
+
+end submodule nf_input2d_layer_submodule
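
For orientation, here is a minimal, hypothetical sketch of exercising the new `input2d_layer` type on its own. It is not part of this commit; it assumes the module and submodule above are compiled as part of the library.

```fortran
program try_input2d_layer
  ! Hypothetical usage sketch; assumes the nf_input2d_layer module above
  ! (and its submodule) are available in the build.
  use nf_input2d_layer, only: input2d_layer
  implicit none

  type(input2d_layer) :: l
  real :: x(3, 4)

  ! The constructor allocates the output buffer and zero-initializes it.
  l = input2d_layer([3, 4])

  ! `set` copies a rank-2 array into the layer's output buffer.
  x = 1.0
  call l % set(x)

  print *, shape(l % output)   ! expected: 3 4
  print *, sum(l % output)     ! expected: 12.0
end program try_input2d_layer
```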

src/nf/nf_layer.f90

Lines changed: 25 additions & 2 deletions
@@ -35,12 +35,14 @@ module nf_layer

     ! Specific subroutines for different array ranks
     procedure, private :: backward_1d
+    procedure, private :: backward_2d
     procedure, private :: backward_3d
     procedure, private :: get_output_1d
+    procedure, private :: get_output_2d
     procedure, private :: get_output_3d

-    generic :: backward => backward_1d, backward_3d
-    generic :: get_output => get_output_1d, get_output_3d
+    generic :: backward => backward_1d, backward_2d, backward_3d
+    generic :: get_output => get_output_1d, get_output_2d, get_output_3d

   end type layer

@@ -59,6 +61,19 @@ pure module subroutine backward_1d(self, previous, gradient)
         !! Array of gradient values from the next layer
     end subroutine backward_1d

+    pure module subroutine backward_2d(self, previous, gradient)
+      !! Apply a backward pass on the layer.
+      !! This changes the internal state of the layer.
+      !! This is normally called internally by the `network % backward`
+      !! method.
+      class(layer), intent(in out) :: self
+        !! Layer instance
+      class(layer), intent(in) :: previous
+        !! Previous layer instance
+      real, intent(in) :: gradient(:, :)
+        !! Array of gradient values from the next layer
+    end subroutine backward_2d
+
     pure module subroutine backward_3d(self, previous, gradient)
       !! Apply a backward pass on the layer.
       !! This changes the internal state of the layer.
@@ -95,6 +110,14 @@ pure module subroutine get_output_1d(self, output)
         !! Output values from this layer
     end subroutine get_output_1d

+    pure module subroutine get_output_2d(self, output)
+      !! Returns the output values (activations) from this layer.
+      class(layer), intent(in) :: self
+        !! Layer instance
+      real, allocatable, intent(out) :: output(:,:)
+        !! Output values from this layer
+    end subroutine get_output_2d
+
     pure module subroutine get_output_3d(self, output)
       !! Returns the output values (activations) from a layer with a 3-d output
       !! (e.g. input3d, conv2d)
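
Because the new specifics are folded into the existing `backward` and `get_output` generics, the rank of the actual argument at the call site selects the 1-d, 2-d, or 3-d specific. A minimal, hypothetical sketch of that dispatch follows; it assumes the top-level `nf` module re-exports `input` and `layer`, and that the generic `input` name covers the new 2-d constructor shown in the next file.

```fortran
program try_get_output_2d
  ! Hypothetical sketch: the rank-2 actual argument resolves the generic
  ! get_output binding to get_output_2d.
  use nf, only: input, layer
  implicit none

  type(layer) :: input_layer
  real, allocatable :: out2d(:,:)

  input_layer = input(28, 28)           ! wraps an input2d_layer internally
  call input_layer % get_output(out2d)  ! dispatches to get_output_2d
  print *, shape(out2d)                 ! expected: 28 28
end program try_get_output_2d
```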

src/nf/nf_layer_constructors.f90

Lines changed: 25 additions & 6 deletions
@@ -35,10 +35,10 @@ module function input1d(layer_size) result(res)
        !! Resulting layer instance
    end function input1d

-    module function input3d(layer_shape) result(res)
-      !! 3-d input layer constructor.
+    module function input2d(dim1, dim2) result(res)
+      !! 2-d input layer constructor.
       !!
-      !! This layer is for inputting 3-d data to the network.
+      !! This layer is for inputting 2-d data to the network.
       !! Currently, this layer must be followed by a conv2d layer.
       !! An input layer must be the first layer in the network.
       !!
@@ -50,10 +50,29 @@ module function input3d(layer_shape) result(res)
       !! ```
       !! use nf, only :: input, layer
       !! type(layer) :: input_layer
-      !! input_layer = input([28, 28, 1])
+      !! input_layer = input(28, 28)
+      !! ```
+      integer, intent(in) :: dim1, dim2
+        !! First and second dimension sizes
+      type(layer) :: res
+        !! Resulting layer instance
+    end function input2d
+
+    module function input3d(dim1, dim2, dim3) result(res)
+      !! 3-d input layer constructor.
+      !!
+      !! This is a specific function that is available
+      !! under a generic name `input`.
+      !!
+      !! Example:
+      !!
+      !! ```
+      !! use nf, only :: input, layer
+      !! type(layer) :: input_layer
+      !! input_layer = input(28, 28, 1)
       !! ```
-      integer, intent(in) :: layer_shape(3)
-        !! Shape of the input layer
+      integer, intent(in) :: dim1, dim2, dim3
+        !! First, second and third dimension sizes
       type(layer) :: res
         !! Resulting layer instance
    end function input3d
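
Taken together, the docstrings above describe the constructor API after this change: each input constructor takes dimension sizes as separate arguments rather than a shape array. A hypothetical sketch, assuming the generic `input` name in the `nf` module covers all three specifics:

```fortran
program input_constructors
  ! Hypothetical sketch of the input constructor forms after this change.
  use nf, only: input, layer
  implicit none

  type(layer) :: in1, in2, in3

  in1 = input(784)         ! 1-d input (input1d, unchanged)
  in2 = input(28, 28)      ! 2-d input (input2d, added by this commit)
  in3 = input(28, 28, 1)   ! 3-d input (input3d, now positional dims
                           ! instead of a shape array: input([28, 28, 1]))
end program input_constructors
```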

src/nf/nf_layer_constructors_submodule.f90

Lines changed: 18 additions & 4 deletions
@@ -5,6 +5,7 @@
   use nf_dense_layer, only: dense_layer
   use nf_flatten_layer, only: flatten_layer
   use nf_input1d_layer, only: input1d_layer
+  use nf_input2d_layer, only: input2d_layer
   use nf_input3d_layer, only: input3d_layer
   use nf_maxpool2d_layer, only: maxpool2d_layer
   use nf_reshape_layer, only: reshape3d_layer
@@ -82,16 +83,28 @@ module function input1d(layer_size) result(res)
   end function input1d


-  module function input3d(layer_shape) result(res)
-    integer, intent(in) :: layer_shape(3)
+  module function input2d(dim1, dim2) result(res)
+    integer, intent(in) :: dim1, dim2
     type(layer) :: res
     res % name = 'input'
-    res % layer_shape = layer_shape
+    res % layer_shape = [dim1, dim2]
     res % input_layer_shape = [integer ::]
-    allocate(res % p, source=input3d_layer(layer_shape))
+    allocate(res % p, source=input2d_layer([dim1, dim2]))
+    res % initialized = .true.
+  end function input2d
+
+
+  module function input3d(dim1, dim2, dim3) result(res)
+    integer, intent(in) :: dim1, dim2, dim3
+    type(layer) :: res
+    res % name = 'input'
+    res % layer_shape = [dim1, dim2, dim3]
+    res % input_layer_shape = [integer ::]
+    allocate(res % p, source=input3d_layer([dim1, dim2, dim3]))
     res % initialized = .true.
   end function input3d

+
   module function maxpool2d(pool_size, stride) result(res)
     integer, intent(in) :: pool_size
     integer, intent(in), optional :: stride
@@ -120,6 +133,7 @@ module function maxpool2d(pool_size, stride) result(res)

   end function maxpool2d

+
   module function reshape(output_shape) result(res)
     integer, intent(in) :: output_shape(:)
     type(layer) :: res
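
Each constructor stores its concrete layer behind the polymorphic `p` component with a sourced allocation. For readers less familiar with that Fortran idiom, here is a small standalone illustration; the types are hypothetical stand-ins, not the library's.

```fortran
program sourced_allocation_demo
  ! Standalone illustration of allocate(..., source=...): the allocated
  ! polymorphic variable takes the dynamic type and value of the source.
  implicit none

  type :: base
  end type base

  type, extends(base) :: concrete
    integer :: n = 0
  end type concrete

  class(base), allocatable :: p

  allocate(p, source=concrete(n=42))

  select type (p)
    type is (concrete)
      print *, 'dynamic type is concrete, n =', p % n
  end select
end program sourced_allocation_demo
```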

src/nf/nf_layer_submodule.f90

Lines changed: 41 additions & 0 deletions
@@ -5,6 +5,7 @@
   use nf_dense_layer, only: dense_layer
   use nf_flatten_layer, only: flatten_layer
   use nf_input1d_layer, only: input1d_layer
+  use nf_input2d_layer, only: input2d_layer
   use nf_input3d_layer, only: input3d_layer
   use nf_maxpool2d_layer, only: maxpool2d_layer
   use nf_reshape_layer, only: reshape3d_layer
@@ -54,6 +55,18 @@ pure module subroutine backward_1d(self, previous, gradient)
   end subroutine backward_1d


+  pure module subroutine backward_2d(self, previous, gradient)
+    implicit none
+    class(layer), intent(in out) :: self
+    class(layer), intent(in) :: previous
+    real, intent(in) :: gradient(:,:)
+
+    ! Backward pass from a 2-d layer downstream currently implemented
+    ! only for dense and flatten layers
+    ! CURRENTLY NO LAYERS, tbd: pull/197 and pull/199
+  end subroutine backward_2d
+
+
   pure module subroutine backward_3d(self, previous, gradient)
     implicit none
     class(layer), intent(in out) :: self
@@ -224,6 +237,23 @@ pure module subroutine get_output_1d(self, output)
   end subroutine get_output_1d


+  pure module subroutine get_output_2d(self, output)
+    implicit none
+    class(layer), intent(in) :: self
+    real, allocatable, intent(out) :: output(:,:)
+
+    select type(this_layer => self % p)
+
+      type is(input2d_layer)
+        allocate(output, source=this_layer % output)
+      class default
+        error stop '1-d output can only be read from an input1d, dense, or flatten layer.'
+
+    end select
+
+  end subroutine get_output_2d
+
+
   pure module subroutine get_output_3d(self, output)
     implicit none
     class(layer), intent(in) :: self
@@ -299,6 +329,8 @@ elemental module function get_num_params(self) result(num_params)
     select type (this_layer => self % p)
       type is (input1d_layer)
         num_params = 0
+      type is (input2d_layer)
+        num_params = 0
       type is (input3d_layer)
         num_params = 0
       type is (dense_layer)
@@ -326,6 +358,8 @@ module function get_params(self) result(params)
     select type (this_layer => self % p)
       type is (input1d_layer)
         ! No parameters to get.
+      type is (input2d_layer)
+        ! No parameters to get.
       type is (input3d_layer)
         ! No parameters to get.
       type is (dense_layer)
@@ -353,6 +387,8 @@ module function get_gradients(self) result(gradients)
     select type (this_layer => self % p)
       type is (input1d_layer)
         ! No gradients to get.
+      type is (input2d_layer)
+        ! No gradients to get.
       type is (input3d_layer)
         ! No gradients to get.
       type is (dense_layer)
@@ -398,6 +434,11 @@ module subroutine set_params(self, params)
         write(stderr, '(a)') 'Warning: calling set_params() ' &
           // 'on a zero-parameter layer; nothing to do.'

+      type is (input2d_layer)
+        ! No parameters to set.
+        write(stderr, '(a)') 'Warning: calling set_params() ' &
+          // 'on a zero-parameter layer; nothing to do.'
+
       type is (input3d_layer)
         ! No parameters to set.
         write(stderr, '(a)') 'Warning: calling set_params() ' &
