GCC Code Coverage Report


Directory: src/athena/
File: athena_base_layer_sub.f90
Date: 2025-12-10 07:37:07
             Exec   Total   Coverage
Lines:          1       1     100.0%
Functions:      0       0         -%
Branches:       6       8      75.0%

Line Branch Exec Source
1 submodule(athena__base_layer) athena__base_layer_submodule
2 !! Submodule containing the implementation of the base layer types
3 !!
4 !! This submodule contains the implementation of the base layer types
5 !! used in the ATHENA library. The base layer types are the abstract
6 !! types from which all other layer types are derived. The submodule
7 !! contains the implementation of the procedures that are common to
8 !! all layer types, such as setting the input shape, getting the
9 !! number of parameters, and printing the layer to a file.
10 !!
11 !! The following procedures are based on code from the neural-fortran library
12 !! (https://github.com/modern-fortran/neural-fortran/blob/main/src/nf/nf_layer.f90),
13 !! namely:
14 !! - get_num_params*
15 !! - get_params*
16 !! - set_params*
17 !! - get_gradients*
18 !! - set_gradients*
19 use coreutils, only: stop_program, print_warning
20
21 contains
22
23 !###############################################################################
24 module function get_attributes_base(this) result(attributes)
25 !! Get the attributes of the layer (for ONNX export)
26 implicit none
27
28 ! Arguments
29 class(base_layer_type), intent(in) :: this
30 !! Instance of the layer
31 type(onnx_attribute_type), allocatable, dimension(:) :: attributes
32 !! Attributes of the layer
33
34 ! Allocate attributes array
35 allocate(attributes(0))
36 ! attributes(1)%name = this%name   ! (would require allocate(attributes(1)))
37 ! attributes(1)%val = this%get_type_name()
38 ! attributes(1)%type = ""
39
40 end function get_attributes_base
41 !-------------------------------------------------------------------------------
42 module function get_attributes_conv(this) result(attributes)
43 !! Get the attributes of a convolutional layer (for ONNX export)
44 implicit none
45
46 ! Arguments
47 class(conv_layer_type), intent(in) :: this
48 !! Instance of the layer
49 type(onnx_attribute_type), allocatable, dimension(:) :: attributes
50 !! Attributes of the layer
51
52 ! Local variables
53 character(256) :: buffer, fmt
54 !! Buffer for formatting
55
56 ! Allocate attributes array
57 allocate(attributes(3))
58 attributes(1)%name = "kernel_shape"
59 write(fmt,'("(",I0,"(1X,I0))")') size(this%knl)
60 write(buffer,fmt) this%knl
61 attributes(1)%val = trim(adjustl(buffer))
62 attributes(1)%type = "ints"
63
64 attributes(2)%name = "strides"
65 write(fmt,'("(",I0,"(1X,I0))")') size(this%stp)
66 write(buffer,fmt) this%stp
67 attributes(2)%val = trim(adjustl(buffer))
68 attributes(2)%type = "ints"
69
70 attributes(3)%name = "dilations"
71 write(fmt,'("(",I0,"(1X,I0))")') size(this%dil)
72 write(buffer,fmt) this%dil
73 attributes(3)%val = trim(adjustl(buffer))
74 attributes(3)%type = "ints"
75
76 end function get_attributes_conv
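
The three attribute blocks above (kernel_shape, strides, dilations) share one
trick: the format string itself is assembled at run time so that the edit
descriptor repeats once per array element. A minimal, self-contained sketch of
the same technique, in plain Fortran with no ATHENA types involved:

    program demo_runtime_format
       implicit none
       integer, dimension(3) :: knl = [3, 5, 7]
       character(256) :: buffer, fmt
       ! First write builds the format string "(3(1X,I0))" from the array size.
       write(fmt,'("(",I0,"(1X,I0))")') size(knl)
       ! Second write applies it, so buffer holds " 3 5 7".
       write(buffer,fmt) knl
       print *, trim(adjustl(buffer))   ! prints: 3 5 7
    end program demo_runtime_format
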
77 !-------------------------------------------------------------------------------
78 module function get_attributes_pool(this) result(attributes)
79 !! Get the attributes of a pooling layer (for ONNX export)
80 implicit none
81
82 ! Arguments
83 class(pool_layer_type), intent(in) :: this
84 !! Instance of the layer
85 type(onnx_attribute_type), allocatable, dimension(:) :: attributes
86 !! Attributes of the layer
87
88 ! Local variables
89 character(256) :: buffer, fmt
90 !! Buffer for formatting
91
92 ! Allocate attributes array
93 allocate(attributes(2))
94 attributes(1)%name = "kernel_shape"
95 write(fmt,'("(",I0,"(1X,I0))")') size(this%pool)
96 write(buffer,fmt) this%pool
97 attributes(1)%val = trim(adjustl(buffer))
98 attributes(1)%type = "ints"
99
100 attributes(2)%name = "strides"
101 write(fmt,'("(",I0,"(1X,I0))")') size(this%strd)
102 write(buffer,fmt) this%strd
103 attributes(2)%val = trim(adjustl(buffer))
104 attributes(2)%type = "ints"
105
106 end function get_attributes_pool
107 !-------------------------------------------------------------------------------
108 module function get_attributes_batch(this) result(attributes)
109 !! Get the attributes of a batch normalisation layer (for ONNX export)
110 implicit none
111
112 ! Arguments
113 class(batch_layer_type), intent(in) :: this
114 !! Instance of the layer
115 type(onnx_attribute_type), allocatable, dimension(:) :: attributes
116 !! Attributes of the layer
117
118 ! Local variables
119 character(256) :: buffer, fmt
120 !! Buffer for formatting
121
122 ! Allocate attributes array
123 allocate(attributes(4))
124 attributes(1)%name = "epsilon"
125 write(buffer,'(F0.6)') this%epsilon
126 attributes(1)%val = trim(adjustl(buffer))
127 attributes(1)%type = "float"
128
129 attributes(2)%name = "momentum"
130 write(buffer,'(F0.6)') this%momentum
131 attributes(2)%val = trim(adjustl(buffer))
132 attributes(2)%type = "float"
133
134 attributes(3)%name = "scale"
135 write(fmt,'("(",I0,"(1X,F0.6))")') this%num_channels
136 write(buffer,fmt) this%params(1)%val(1:this%num_channels,1)
137 attributes(3)%val = trim(adjustl(buffer))
138 attributes(3)%type = "floats"
139
140 attributes(4)%name = "B"
141 write(fmt,'("(",I0,"(1X,F0.6))")') this%num_channels
142 write(buffer,fmt) this%params(1)%val(this%num_channels+1:2*this%num_channels,1)
143 attributes(4)%val = trim(adjustl(buffer))
144 attributes(4)%type = "floats"
145
146 end function get_attributes_batch
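
The "scale" and "B" slices above rely on the batch-norm parameters being
packed channel-wise into a single column of params(1)%val: the first
num_channels entries are the scales (gamma), the next num_channels the shifts
(beta). That layout is read off the code above; the sketch below reproduces
the split with a plain array (illustrative values only):

    program demo_batch_packing
       use, intrinsic :: iso_fortran_env, only: real32
       implicit none
       integer, parameter :: c = 4
       integer :: i
       real(real32), dimension(2*c,1) :: val
       ! Fill with 1..2c so the split is easy to see.
       val(:,1) = [ (real(i, real32), i = 1, 2*c) ]
       print *, "scale (gamma):", val(1:c,1)        ! entries 1..c    -> 1 2 3 4
       print *, "shift (beta): ", val(c+1:2*c,1)    ! entries c+1..2c -> 5 6 7 8
    end program demo_batch_packing
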
147 !###############################################################################
148
149
150 !###############################################################################
151 module subroutine build_from_onnx_base( &
152 this, node, initialisers, value_info, verbose &
153 )
154 !! Build layer from an ONNX node and its initialisers
155 implicit none
156
157 ! Arguments
158 class(base_layer_type), intent(inout) :: this
159 !! Instance of the layer
160 type(onnx_node_type), intent(in) :: node
161 !! ONNX node
162 type(onnx_initialiser_type), dimension(:), intent(in) :: initialisers
163 !! ONNX initialisers
164 type(onnx_tensor_type), dimension(:), intent(in) :: value_info
165 !! ONNX value info
166 integer, intent(in) :: verbose
167 !! Verbosity level
168
169 call print_warning("build_from_onnx_base: " // &
170 trim(this%name) // " layer cannot be built from ONNX")
171
172 end subroutine build_from_onnx_base
173 !###############################################################################
174
175
176 !###############################################################################
177 module subroutine set_rank_base(this, input_rank, output_rank)
178 !! Set the input and output ranks of the layer
179 implicit none
180
181 ! Arguments
182 class(base_layer_type), intent(inout) :: this
183 !! Instance of the layer
184 integer, intent(in) :: input_rank
185 !! Input rank
186 integer, intent(in) :: output_rank
187 !! Output rank
188
189 !---------------------------------------------------------------------------
190 ! Set input and output ranks
191 !---------------------------------------------------------------------------
192 call stop_program("set_rank_base: this layer cannot have its rank set")
193
194 end subroutine set_rank_base
195 !###############################################################################
196
197
198 !###############################################################################
199 module subroutine set_shape_base(this, input_shape)
200 !! Set the input shape of the layer
201 implicit none
202
203 ! Arguments
204 class(base_layer_type), intent(inout) :: this
205 !! Instance of the layer
206 integer, dimension(:), intent(in) :: input_shape
207 !! Input shape
208 character(len=100) :: err_msg
209 !! Error message
210
211 !---------------------------------------------------------------------------
212 ! initialise input shape
213 !---------------------------------------------------------------------------
214 if(size(input_shape,dim=1).eq.this%input_rank)then
215 this%input_shape = input_shape
216 else
217 write(err_msg,'("Invalid size of input_shape in ",A,&
218 &" expected (",I0,"), got (",I0,")")') &
219 trim(this%name), this%input_rank, size(input_shape,dim=1)
220 call stop_program(err_msg)
221 return
222 end if
223
224 end subroutine set_shape_base
225 !###############################################################################
226
227
228 !###############################################################################
229 module subroutine extract_output_base(this, output)
230 !! Get the output of the layer
231 implicit none
232
233 ! Arguments
234 class(base_layer_type), intent(in) :: this
235 !! Instance of the layer
236 real(real32), allocatable, dimension(..), intent(out) :: output
237 !! Output of the layer
238
239 if(size(this%output).gt.1)then
240 call print_warning("extract_output_base: output has more than one"&
241 &" sample, cannot extract")
242 return
243 end if
244
245 call this%output(1,1)%extract(output)
246
247 end subroutine extract_output_base
248 !###############################################################################
249
250
251 !###############################################################################
252 pure module function get_num_params_base(this) result(num_params)
253 !! Get the number of parameters in the layer
254 implicit none
255
256 ! Arguments
257 class(base_layer_type), intent(in) :: this
258 !! Instance of the layer
259 integer :: num_params
260 !! Number of parameters
261
262 ! No parameters in the base layer
263 num_params = 0
264
265 end function get_num_params_base
266 !-------------------------------------------------------------------------------
267 pure module function get_num_params_conv(this) result(num_params)
268 !! Get the number of parameters in convolutional layer
269 implicit none
270
271 ! Arguments
272 class(conv_layer_type), intent(in) :: this
273 !! Instance of the layer
274 integer :: num_params
275 !! Number of parameters
276
277 ! num_filters x num_channels x kernel_size + num_biases
278 ! num_biases = num_filters
279 num_params = this%num_filters * this%num_channels * product(this%knl) + &
280 this%num_filters
281
282 end function get_num_params_conv
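
As a worked example of the formula above: a 2D convolutional layer with 16
filters, 3 input channels and a 3x3 kernel (so product(this%knl) = 9) has
16 * 3 * 9 + 16 = 448 parameters.
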
283 !-------------------------------------------------------------------------------
284 pure module function get_num_params_batch(this) result(num_params)
285 !! Get the number of parameters in batch normalisation layer
286 implicit none
287
288 ! Arguments
289 class(batch_layer_type), intent(in) :: this
290 !! Instance of the layer
291 integer :: num_params
292 !! Number of parameters
293
294 ! two parameters per channel: a scale (gamma) and a shift (beta)
295 ! hence num_params = 2 * num_channels
296 num_params = 2 * this%num_channels
297
298 end function get_num_params_batch
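
For instance, a batch normalisation layer over 64 channels carries
2 * 64 = 128 learnable parameters (one scale and one shift per channel).
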
299 !###############################################################################
300
301
302 !###############################################################################
303 module subroutine forward_base(this, input)
304 !! Forward pass for the layer
305 implicit none
306
307 ! Arguments
308 class(base_layer_type), intent(inout) :: this
309 !! Instance of the layer
310 class(array_type), dimension(:,:), intent(in) :: input
311 !! Input data
312
313 ! Local variables
314 integer :: i, j
315 !! Loop indices
316
317 do i = 1, size(input, 1)
318 do j = 1, size(input, 2)
319 if(.not.input(i,j)%allocated)then
320 call stop_program("forward_base: input to layer not allocated")
321 return
322 end if
323 this%output(i,j) = input(i,j)
324 end do
325 end do
326
327 end subroutine forward_base
328 !-------------------------------------------------------------------------------
329 module function forward_eval_base(this, input) result(output)
330 !! Forward pass of layer and return output for evaluation
331 implicit none
332
333 ! Arguments
334 class(base_layer_type), intent(inout), target :: this
335 !! Instance of the layer
336 class(array_type), dimension(:,:), intent(in) :: input
337 !! Input data
338 type(array_type), pointer :: output(:,:)
339 !! Output data
340
341 call this%forward(input)
342 output => this%output
343 end function forward_eval_base
344 !###############################################################################
345
346
347 !###############################################################################
348 module subroutine set_graph_base(this, graph)
349 !! Set the graph structure of the input data
350 implicit none
351
352 ! Arguments
353 class(base_layer_type), intent(inout) :: this
354 !! Instance of the layer
355 type(graph_type), dimension(:), intent(in) :: graph
356 !! Graph structure of input data
357
358 ! Local variables
359 integer :: s
360 !! Loop index
361
362 if(allocated(this%graph))then
363 if(size(this%graph).ne.size(graph))then
364 deallocate(this%graph)
365 allocate(this%graph(size(graph)))
366 end if
367 else
368 allocate(this%graph(size(graph)))
369 end if
370 do s = 1, size(graph)
371 this%graph(s)%adj_ia = graph(s)%adj_ia
372 this%graph(s)%adj_ja = graph(s)%adj_ja
373 this%graph(s)%edge_weights = graph(s)%edge_weights
374 this%graph(s)%num_edges = graph(s)%num_edges
375 this%graph(s)%num_vertices = graph(s)%num_vertices
376 end do
377
378 end subroutine set_graph_base
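
The adj_ia/adj_ja naming in the copy loop above suggests the usual compressed
sparse row (ia/ja) adjacency convention: adj_ia(v) indexes the first neighbour
of vertex v within adj_ja. If that reading is right, a graph in this form can
be walked as in the sketch below (the triangle graph and all values are
illustrative, not taken from ATHENA):

    program demo_csr_adjacency
       implicit none
       ! Undirected triangle 1-2, 2-3, 3-1 stored as 6 directed edges.
       integer, dimension(4) :: adj_ia = [1, 3, 5, 7]        ! row pointers
       integer, dimension(6) :: adj_ja = [2, 3, 1, 3, 1, 2]  ! neighbour lists
       integer :: v, k
       do v = 1, size(adj_ia) - 1
          do k = adj_ia(v), adj_ia(v+1) - 1
             print *, "edge", v, "->", adj_ja(k)
          end do
       end do
    end program demo_csr_adjacency
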
379 !###############################################################################
380
381
382 !###############################################################################
383 module subroutine nullify_graph_base(this)
384 !! Nullify the forward pass data of the layer to free memory
385 implicit none
386
387 ! Arguments
388 class(base_layer_type), intent(inout) :: this
389 !! Instance of the layer
390
391 ! Local variables
392 integer :: i, j
393 !! Loop indices
394
395 do i = 1, size(this%output,1)
396 do j = 1, size(this%output,2)
397 call this%output(i,j)%nullify_graph()
398 end do
399 end do
400
401 end subroutine nullify_graph_base
402 !###############################################################################
403
404
405 !###############################################################################
406 module subroutine reduce_learnable(this, input)
407 !! Merge two learnable layers via summation
408 implicit none
409
410 ! Arguments
411 class(learnable_layer_type), intent(inout) :: this
412 !! Instance of the layer
413 class(learnable_layer_type), intent(in) :: input
414 !! Instance of a layer
415
416 ! Local variables
417 integer :: i
418 !! Loop index
419
420 if(allocated(this%params).and.allocated(input%params))then
421 if(size(this%params).ne.size(input%params))then
422 call stop_program("reduce_learnable: incompatible parameter sizes")
423 return
424 end if
425 do i = 1, size(this%params,1)
426 this%params(i) = this%params(i) + input%params(i)
427 if(associated(this%params(i)%grad).and.&
428 associated(input%params(i)%grad))then
429 this%params(i)%grad = this%params(i)%grad + &
430 input%params(i)%grad
431 end if
432 end do
433 else
434 call stop_program("reduce_learnable: unallocated parameter arrays")
435 return
436 end if
437
438 end subroutine reduce_learnable
439 !###############################################################################
440
441
442 !###############################################################################
443 module function add_learnable(a, b) result(output)
444 !! Add two learnable layers together
445 implicit none
446
447 ! Arguments
448 class(learnable_layer_type), intent(in) :: a, b
449 !! Instances of layers
450 class(learnable_layer_type), allocatable :: output
451 !! Output layer
452
453 ! Local variables
454 integer :: i
455 !! Loop index
456
457 output = a
458 if(allocated(a%params).and.allocated(b%params))then
459 if(size(a%params).ne.size(b%params))then
460 call stop_program("add_learnable: incompatible parameter sizes")
461 return
462 end if
463 do i = 1, size(a%params,1)
464 output%params(i)%grad => null()
465 output%params(i) = a%params(i) + b%params(i)
466 if(associated(a%params(i)%grad).and.&
467 associated(b%params(i)%grad))then
468 allocate(output%params(i)%grad)
469 output%params(i)%grad = a%params(i)%grad + &
470 b%params(i)%grad
471 end if
472 end do
473 else
474 call stop_program("add_learnable: unallocated parameter arrays")
475 return
476 end if
477
478 end function add_learnable
479 !###############################################################################
480
481
482 !###############################################################################
483 pure module function get_params(this) result(params)
484 !! Get the learnable parameters of the layer
485 !!
486 !! This function returns the learnable parameters of the layer
487 !! as a single array.
488 !! This has been modified from the neural-fortran library
489 implicit none
490
491 ! Arguments
492 class(learnable_layer_type), intent(in) :: this
493 !! Instance of the layer
494 real(real32), dimension(this%num_params) :: params
495 !! Learnable parameters
496
497 ! Local variables
498 integer :: i, start_idx, end_idx
499 !! Loop indices
500
501 start_idx = 0
502 end_idx = 0
503 do i = 1, size(this%params)
504 start_idx = end_idx + 1
505 end_idx = start_idx + size(this%params(i)%val,1) - 1
506 params(start_idx:end_idx) = this%params(i)%val(:,1)
507 end do
508
509 end function get_params
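
get_params and set_params (below) walk the parameter blocks with a sliding
(start_idx, end_idx) window, concatenating the first column of each block into
one flat vector. The same walk over two plain arrays (block sizes 3 and 2, so
the flat vector has length 5):

    program demo_flat_packing
       use, intrinsic :: iso_fortran_env, only: real32
       implicit none
       real(real32) :: block1(3) = [1._real32, 2._real32, 3._real32]
       real(real32) :: block2(2) = [4._real32, 5._real32]
       real(real32) :: flat(5)
       integer :: start_idx, end_idx
       end_idx = 0
       ! Block 1 lands in flat(1:3), block 2 in flat(4:5).
       start_idx = end_idx + 1; end_idx = start_idx + size(block1) - 1
       flat(start_idx:end_idx) = block1
       start_idx = end_idx + 1; end_idx = start_idx + size(block2) - 1
       flat(start_idx:end_idx) = block2
       print *, flat   ! 1.0 2.0 3.0 4.0 5.0
    end program demo_flat_packing
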
510 !###############################################################################
511
512
513 !###############################################################################
514 module subroutine set_params(this, params)
515 !! Set the learnable parameters of the layer
516 !!
517 !! This function sets the learnable parameters of the layer
518 !! from a single array.
519 !! This has been modified from the neural-fortran library
520 implicit none
521
522 ! Arguments
523 class(learnable_layer_type), intent(inout) :: this
524 !! Instance of the layer
525 real(real32), dimension(this%num_params), intent(in) :: params
526 !! Learnable parameters
527
528 ! Local variables
529 integer :: i, start_idx, end_idx
530 !! Loop indices
531
532 if(.not.allocated(this%params)) then
533 call stop_program("set_params: params not allocated")
534 return
535 end if
536 start_idx = 0
537 end_idx = 0
538 do i = 1, size(this%params)
539 start_idx = end_idx + 1
540 end_idx = start_idx + size(this%params(i)%val,1) - 1
541 this%params(i)%val(:,1) = params(start_idx:end_idx)
542 end do
543
544 end subroutine set_params
545 !###############################################################################
546
547
548 !###############################################################################
549 pure module function get_gradients(this, clip_method) result(gradients)
550 !! Get the gradients of the layer
551 !!
552 !! This function returns the gradients of the layer as a single array.
553 !! This has been modified from the neural-fortran library
554 use athena__clipper, only: clip_type
555 implicit none
556
557 ! Arguments
558 class(learnable_layer_type), intent(in) :: this
559 !! Instance of the layer
560 type(clip_type), optional, intent(in) :: clip_method
561 !! Method to clip the gradients
562 real(real32), dimension(this%num_params) :: gradients
563 !! Gradients of the layer
564
565 ! Local variables
566 integer :: i, start_idx, end_idx
567 !! Loop indices
568
569 if(.not.allocated(this%params)) then
570 gradients = 0._real32; return
571 end if
572 start_idx = 0
573 end_idx = 0
574 do i = 1, size(this%params)
575 start_idx = end_idx + 1
576 end_idx = start_idx + size(this%params(i)%val,1) - 1
577 if(.not.associated(this%params(i)%grad)) then
578 gradients(start_idx:end_idx) = 0._real32
579 else
580 gradients(start_idx:end_idx) = this%params(i)%grad%val(:,1)
581 end if
582 end do
583
584 if(present(clip_method)) call clip_method%apply(size(gradients),gradients)
585
586 end function get_gradients
587 !###############################################################################
588
589
590 !###############################################################################
591 module subroutine set_gradients(this, gradients)
592 !! Set the gradients of the layer
593 !!
594 !! This function sets the gradients of the layer from a single array.
595 !! This has been modified from the neural-fortran library
596 implicit none
597
598 ! Arguments
599 class(learnable_layer_type), intent(inout) :: this
600 !! Instance of the layer
601 real(real32), dimension(..), intent(in) :: gradients
602 !! Gradients of the layer
603
604 ! Local variables
605 integer :: i, start_idx, end_idx
606 !! Loop indices
607
608 start_idx = 0
609 end_idx = 0
610 select rank(gradients)
611 rank(0)
612 do i = 1, size(this%params)
613 if(.not.associated(this%params(i)%grad)) then
614 this%params(i)%grad => this%params(i)%create_result()
615 end if
616 this%params(i)%grad%val(:,1) = gradients
617 end do
618 rank(1)
619 do i = 1, size(this%params)
620 if(.not.associated(this%params(i)%grad)) then
621 this%params(i)%grad => this%params(i)%create_result()
622 end if
623 start_idx = end_idx + 1
624 end_idx = start_idx + size(this%params(i)%val,1) - 1
625 this%params(i)%grad%val(:,1) = gradients(start_idx:end_idx)
626 end do
627 end select
628
629 end subroutine set_gradients
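
set_gradients takes an assumed-rank dummy (dimension(..)), so the one entry
point accepts either a scalar, broadcast into every gradient block, or a flat
vector, unpacked block by block. The Fortran 2018 select rank construct it
relies on, shown in isolation:

    program demo_select_rank
       implicit none
       real :: scalar = 0.5
       real :: vector(3) = [1.0, 2.0, 3.0]
       call describe(scalar)   ! resolves to the rank(0) branch
       call describe(vector)   ! resolves to the rank(1) branch
    contains
       subroutine describe(gradients)
          real, dimension(..), intent(in) :: gradients
          select rank(gradients)
          rank(0)
             print *, "scalar, value broadcast:", gradients
          rank(1)
             print *, "vector of length", size(gradients)
          end select
       end subroutine describe
    end program demo_select_rank
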
630 !###############################################################################
631
632
633 6/8 22 end submodule athena__base_layer_submodule
    ✓ Branch 0 (2→3) taken 8 times.
    ✓ Branch 1 (2→4) taken 14 times.
    ✓ Branch 2 (3→4) taken 8 times.
    ✗ Branch 3 (3→5) not taken.
    ✓ Branch 4 (6→7) taken 14 times.
    ✓ Branch 5 (6→46) taken 8 times.
    ✓ Branch 6 (46→47) taken 8 times.
    ✗ Branch 7 (46→100) not taken.