Commit dc55df0

Initialization of network-wide optimizer no longer needed now that we switched to per-layer optimizer instances
1 parent 0e11f10

File tree

1 file changed: +8 -12 lines

src/nf/nf_network_submodule.f90

Lines changed: 8 additions & 12 deletions
@@ -611,8 +611,6 @@ module subroutine train(self, input_data, output_data, batch_size, &
 
     end if
 
-    call self % optimizer % init(self % get_num_params())
-
     do n = 1, size(self % layers)
       call self % layers(n) % optimizer % init(self % layers(n) % get_num_params())
     end do
@@ -690,8 +688,6 @@ module subroutine update(self, optimizer, batch_size)
 
     end if
 
-    call self % optimizer % init(self % get_num_params())
-
     do n = 1, size(self % layers)
       call self % layers(n) % optimizer % init(self % layers(n) % get_num_params())
     end do
@@ -729,29 +725,29 @@ module subroutine update(self, optimizer, batch_size)
       type is(dense_layer)
         call this_layer % get_params_ptr(weights, biases)
         call this_layer % get_gradients_ptr(dw, db)
-        call self % layers(n) %optimizer % minimize(weights, dw / batch_size_)
-        call self % layers(n) %optimizer % minimize(biases, db / batch_size_)
+        call self % layers(n) % optimizer % minimize(weights, dw / batch_size_)
+        call self % layers(n) % optimizer % minimize(biases, db / batch_size_)
         this_layer % dw = 0
         this_layer % db = 0
       type is(conv1d_layer)
        call this_layer % get_params_ptr(weights, biases)
         call this_layer % get_gradients_ptr(dw, db)
-        call self % layers(n) %optimizer % minimize(weights, dw / batch_size_)
-        call self % layers(n) %optimizer % minimize(biases, db / batch_size_)
+        call self % layers(n) % optimizer % minimize(weights, dw / batch_size_)
+        call self % layers(n) % optimizer % minimize(biases, db / batch_size_)
         this_layer % dw = 0
         this_layer % db = 0
       type is(conv2d_layer)
         call this_layer % get_params_ptr(weights, biases)
         call this_layer % get_gradients_ptr(dw, db)
-        call self % layers(n) %optimizer % minimize(weights, dw / batch_size_)
-        call self % layers(n) %optimizer % minimize(biases, db / batch_size_)
+        call self % layers(n) % optimizer % minimize(weights, dw / batch_size_)
+        call self % layers(n) % optimizer % minimize(biases, db / batch_size_)
         this_layer % dw = 0
         this_layer % db = 0
       type is(locally_connected1d_layer)
         call this_layer % get_params_ptr(weights, biases)
         call this_layer % get_gradients_ptr(dw, db)
-        call self % layers(n) %optimizer % minimize(weights, dw / batch_size_)
-        call self % layers(n) %optimizer % minimize(biases, db / batch_size_)
+        call self % layers(n) % optimizer % minimize(weights, dw / batch_size_)
+        call self % layers(n) % optimizer % minimize(biases, db / batch_size_)
         this_layer % dw = 0
         this_layer % db = 0
       end select
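
For context, a minimal sketch of the per-layer pattern this commit settles on: each layer carries its own optimizer instance, initialized with that layer's parameter count, which is why the network-wide call to self % optimizer % init is removed. The loop body mirrors the diff above, but the enclosing subroutine, its name, and the network type name are assumed for illustration and are not part of this commit.

  ! Illustrative sketch only; subroutine name and "network" type are assumed.
  subroutine init_layer_optimizers(self)
    class(network), intent(inout) :: self
    integer :: n
    ! Each layer owns its optimizer instance, sized to that layer's
    ! parameter count, so no network-wide optimizer init is required.
    do n = 1, size(self % layers)
      call self % layers(n) % optimizer % init(self % layers(n) % get_num_params())
    end do
  end subroutine init_layer_optimizers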
