@@ -283,7 +283,6 @@ end function get_activation_by_name
   pure module subroutine backward(self, output)
     class(network), intent(in out) :: self
     real, intent(in) :: output(:)
-    real, allocatable :: gradient(:)
     integer :: n, num_layers
 
     num_layers = size(self % layers)
@@ -296,18 +295,25 @@ pure module subroutine backward(self, output)
         ! Output layer; apply the loss function
         select type (this_layer => self % layers(n) % p)
           type is (dense_layer)
-            gradient = quadratic_derivative(output, this_layer % output)
+            call self % layers(n) % backward( &
+              self % layers(n - 1), &
+              quadratic_derivative(output, this_layer % output) &
+            )
         end select
       else
         ! Hidden layer; take the gradient from the next layer
         select type (next_layer => self % layers(n + 1) % p)
           type is (dense_layer)
-            gradient = next_layer % gradient
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is (flatten_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is (conv2d_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is (maxpool2d_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
         end select
       end if
 
-      call self % layers(n) % backward(self % layers(n - 1), gradient)
-
     end do
 
   end subroutine backward
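
For context, a minimal driver sketch showing where this backward pass sits in a training step. It assumes companion forward and update procedures and nf module constructors (input, dense), none of which appear in this diff:

program backward_usage_sketch
  ! Hypothetical driver; only the network backward pass appears in the diff above.
  use nf, only: network, input, dense   ! assumed public API of the library
  implicit none
  type(network) :: net
  real :: x(3), y(2)
  integer :: epoch

  net = network([input(3), dense(5), dense(2)])   ! assumed constructor form
  x = [0.1, 0.2, 0.3]
  y = [1.0, 0.0]

  do epoch = 1, 100
    call net % forward(x)    ! assumed forward pass; stores each layer's output
    call net % backward(y)   ! the subroutine modified in this diff
    call net % update(1.0)   ! assumed gradient-descent weight update
  end do

end program backward_usage_sketch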