GCC Code Coverage Report


Directory: src/athena/
File: athena_container_layer_sub.f90
Date: 2025-12-10 07:37:07
Exec Total Coverage
Lines: 0 0 -%
Functions: 0 0 -%
Branches: 0 0 -%

Line Branch Exec Source
1 submodule(athena__container_layer) athena__container_layer_submodule
2 !! Submodule containing the implementation for the container layer
3 !!
4 !! This submodule contains the implementation of the container layer
5 !! which is a container for an individual layer.
6 !! This also provides the initialisation of the list of layer types
7 !! that can be used for reading layers into a network model from a file.
8 use athena__base_layer, only: learnable_layer_type
9 use athena__actv_layer, only: read_actv_layer, create_from_onnx_actv_layer
10 use athena__avgpool1d_layer, only: read_avgpool1d_layer
11 use athena__avgpool2d_layer, only: read_avgpool2d_layer
12 use athena__avgpool3d_layer, only: read_avgpool3d_layer
13 use athena__batchnorm1d_layer, only: read_batchnorm1d_layer
14 use athena__batchnorm2d_layer, only: read_batchnorm2d_layer
15 use athena__batchnorm3d_layer, only: read_batchnorm3d_layer
16 use athena__conv1d_layer, only: read_conv1d_layer
17 use athena__conv2d_layer, only: read_conv2d_layer
18 use athena__conv3d_layer, only: read_conv3d_layer
19 use athena__dropblock2d_layer, only: read_dropblock2d_layer
20 use athena__dropblock3d_layer, only: read_dropblock3d_layer
21 use athena__dropout_layer, only: read_dropout_layer, create_from_onnx_dropout_layer
22 use athena__duvenaud_msgpass_layer, only: read_duvenaud_msgpass_layer
23 use athena__flatten_layer, only: read_flatten_layer, create_from_onnx_flatten_layer
24 use athena__full_layer, only: read_full_layer, create_from_onnx_full_layer
25 use athena__input_layer, only: read_input_layer, create_from_onnx_input_layer
26 use athena__kipf_msgpass_layer, only: read_kipf_msgpass_layer
27 use athena__maxpool1d_layer, only: read_maxpool1d_layer
28 use athena__maxpool2d_layer, only: read_maxpool2d_layer
29 use athena__maxpool3d_layer, only: read_maxpool3d_layer
30 use athena__pad1d_layer, only: read_pad1d_layer
31 use athena__pad2d_layer, only: read_pad2d_layer
32 use athena__pad3d_layer, only: read_pad3d_layer
33 use athena__recurrent_layer, only: read_recurrent_layer
34 use athena__reshape_layer, only: read_reshape_layer, create_from_onnx_reshape_layer
35
36 use athena__onnx_creators, only: &
37 create_from_onnx_avgpool_layer, &
38 create_from_onnx_batchnorm_layer, &
39 create_from_onnx_conv_layer, &
40 create_from_onnx_maxpool_layer, &
41 create_from_onnx_pad_layer
42
43 contains
44
45 module subroutine finalise_container_layer(this)
46 !! Finalise the container layer
47 implicit none
48 class(container_layer_type), intent(inout) :: this
49
50 if (allocated(this%layer)) deallocate(this%layer)
51
52 end subroutine finalise_container_layer
53
54 !###############################################################################
55
56 #if defined(GFORTRAN)
57 subroutine container_reduction(this, rhs)
58 implicit none
59 class(container_layer_type), intent(inout) :: this
60 class(container_layer_type), intent(in) :: rhs
61
62 select type(layer_this => this%layer)
63 class is(learnable_layer_type)
64 select type(layer_rhs => rhs%layer)
65 class is(learnable_layer_type)
66 call layer_this%reduce(layer_rhs)
67 end select
68 end select
69
70 end subroutine container_reduction
71 #endif
72
73
74 module subroutine allocate_list_of_layer_types(addit_list)
75 implicit none
76 type(read_layer_container), dimension(:), intent(in), optional :: &
77 addit_list
78
79
80 if(.not.allocated(list_of_layer_types)) allocate(list_of_layer_types(0))
81 list_of_layer_types = [ &
82 list_of_layer_types, &
83 read_layer_container('actv', read_actv_layer), &
84 read_layer_container('avgpool1d', read_avgpool1d_layer), &
85 read_layer_container('avgpool2d', read_avgpool2d_layer), &
86 read_layer_container('avgpool3d', read_avgpool3d_layer), &
87 read_layer_container('batchnorm1d', read_batchnorm1d_layer), &
88 read_layer_container('batchnorm2d', read_batchnorm2d_layer), &
89 read_layer_container('batchnorm3d', read_batchnorm3d_layer), &
90 read_layer_container('conv1d', read_conv1d_layer), &
91 read_layer_container('conv2d', read_conv2d_layer), &
92 read_layer_container('conv3d', read_conv3d_layer), &
93 read_layer_container('dropblock2d', read_dropblock2d_layer), &
94 read_layer_container('dropblock3d', read_dropblock3d_layer), &
95 read_layer_container('dropout', read_dropout_layer), &
96 read_layer_container('duvenaud', read_duvenaud_msgpass_layer), &
97 read_layer_container('flatten', read_flatten_layer), &
98 read_layer_container('full', read_full_layer), &
99 read_layer_container('input', read_input_layer), &
100 read_layer_container('kipf', read_kipf_msgpass_layer), &
101 read_layer_container('maxpool1d', read_maxpool1d_layer), &
102 read_layer_container('maxpool2d', read_maxpool2d_layer), &
103 read_layer_container('maxpool3d', read_maxpool3d_layer), &
104 read_layer_container('pad1d', read_pad1d_layer), &
105 read_layer_container('pad2d', read_pad2d_layer), &
106 read_layer_container('pad3d', read_pad3d_layer), &
107 read_layer_container('recurrent', read_recurrent_layer), &
108 read_layer_container('reshape', read_reshape_layer) &
109 ]
110 if(present(addit_list))then
111 list_of_layer_types = [list_of_layer_types, addit_list]
112 end if
113
114 end subroutine allocate_list_of_layer_types
115
116 module subroutine allocate_list_of_onnx_layer_creators(addit_list)
117 implicit none
118 type(onnx_create_layer_container), dimension(:), intent(in), optional :: &
119 addit_list
120
121 ! make a global create_from_onnx_conv_layer that allocates depending on the attributes
122 if(.not.allocated(list_of_onnx_layer_creators)) &
123 allocate(list_of_onnx_layer_creators(0))
124 list_of_onnx_layer_creators = [ &
125 list_of_onnx_layer_creators, &
126 onnx_create_layer_container('AvgPool', create_from_onnx_avgpool_layer), &
127 onnx_create_layer_container('BatchNormalization', &
128 create_from_onnx_batchnorm_layer &
129 ), &
130 onnx_create_layer_container('Conv', create_from_onnx_conv_layer), &
131 onnx_create_layer_container('Dropout', create_from_onnx_dropout_layer), &
132 onnx_create_layer_container('Flatten', create_from_onnx_flatten_layer), &
133 onnx_create_layer_container('MatMul', create_from_onnx_full_layer), &
134 onnx_create_layer_container('MaxPool', create_from_onnx_maxpool_layer), &
135 onnx_create_layer_container('Pad', create_from_onnx_pad_layer), &
136 onnx_create_layer_container('Relu', create_from_onnx_actv_layer), &
137 onnx_create_layer_container('Reshape', create_from_onnx_reshape_layer), &
138 onnx_create_layer_container('Selu', create_from_onnx_actv_layer), &
139 onnx_create_layer_container('Sigmoid', create_from_onnx_actv_layer), &
140 onnx_create_layer_container('Softmax', create_from_onnx_actv_layer), &
141 onnx_create_layer_container('Swish', create_from_onnx_actv_layer), &
142 onnx_create_layer_container('Tanh', create_from_onnx_actv_layer) &
143 ]
144 if(present(addit_list))then
145 list_of_onnx_layer_creators = [list_of_onnx_layer_creators, addit_list]
146 end if
147
148 end subroutine allocate_list_of_onnx_layer_creators
149
150 end submodule athena__container_layer_submodule
151