GCC Code Coverage Report


Directory: src/athena/
File: athena_base_layer_sub_init.f90
Date: 2025-12-10 07:37:07
            Exec  Total  Coverage
Lines:         0      0    100.0%
Functions:     0      0        -%
Branches:      0      0        -%

Line Branch Exec Source
1 submodule(athena__base_layer) athena__base_layer_submodule_init
2 !! Submodule implementing the base layer types
3 !!
4 !! This submodule implements the initialisation procedures of the
5 !! base layer types used in the ATHENA library. The base layer
6 !! types are the abstract types from which all other layer types
7 !! are derived.
8 use coreutils, only: stop_program
9 use athena__diffstruc_extd, only: batchnorm_array_type
10
11 contains
12
13 !###############################################################################
14 module subroutine init_pad(this, input_shape, verbose)
15 !! Initialise padding layer
16 implicit none
17
18 ! Arguments
19 class(pad_layer_type), intent(inout) :: this
20 !! Instance of the padding layer
21 integer, dimension(:), intent(in) :: input_shape
22 !! Input shape
23 integer, optional, intent(in) :: verbose
24 !! Verbosity level
25
26 ! Local variables
27 integer :: i
28 !! Loop index
29 integer :: verbose_
30 !! Verbosity level
31
32
33 !---------------------------------------------------------------------------
34 ! Initialise optional arguments
35 !---------------------------------------------------------------------------
36 verbose_ = 0; if(present(verbose)) verbose_ = verbose
37
38
39 !---------------------------------------------------------------------------
40 ! Initialise input shape
41 !---------------------------------------------------------------------------
42 if(.not.allocated(this%input_shape)) call this%set_shape(input_shape)
43 if(.not.allocated(this%orig_bound)) then
44 allocate(this%orig_bound(2,this%input_rank-1))
45 allocate(this%dest_bound(2,this%input_rank-1))
46 end if
47 do i = 1, this%input_rank - 1
48 this%orig_bound(:,i) = [ 1, this%input_shape(i) ]
49 this%dest_bound(:,i) = [ 1, this%input_shape(i) + this%pad(i) * 2 ]
50 call this%facets(i)%setup_bounds( &
51 length = this%input_shape(:this%input_rank-1), &
52 pad = this%pad, &
53 imethod = this%imethod &
54 )
55 end do
56
57
58 !---------------------------------------------------------------------------
59 ! Set up number of channels, width, height
60 !---------------------------------------------------------------------------
61 this%num_channels = this%input_shape(this%input_rank)
62 if(allocated(this%output_shape)) deallocate(this%output_shape)
63 allocate( this%output_shape(this%input_rank) )
64 this%output_shape(this%input_rank) = this%input_shape(this%input_rank)
65 this%output_shape(:this%input_rank-1) = &
66 this%input_shape(:this%input_rank-1) + this%pad(:) * 2
67
68
69 !---------------------------------------------------------------------------
70 ! Allocate arrays
71 !---------------------------------------------------------------------------
72 if(this%use_graph_input)then
73 call stop_program("Graph input not supported for padding layer")
74 return
75 end if
76 if(allocated(this%output)) deallocate(this%output)
77 allocate( this%output(1,1) )
78
79 end subroutine init_pad
80 !###############################################################################
81
82
83 !###############################################################################
84 module subroutine init_pool(this, input_shape, verbose)
85 !! Initialise pooling layer
86 implicit none
87
88 ! Arguments
89 class(pool_layer_type), intent(inout) :: this
90 !! Instance of the pooling layer
91 integer, dimension(:), intent(in) :: input_shape
92 !! Input shape
93 integer, optional, intent(in) :: verbose
94 !! Verbosity level
95
96 ! Local variables
97 integer :: verbose_
98 !! Verbosity level
99
100
101 !---------------------------------------------------------------------------
102 ! Initialise optional arguments
103 !---------------------------------------------------------------------------
104 verbose_ = 0; if(present(verbose)) verbose_ = verbose
105
106
107 !---------------------------------------------------------------------------
108 ! Initialise input shape
109 !---------------------------------------------------------------------------
110 if(.not.allocated(this%input_shape)) call this%set_shape(input_shape)
111
112
113 !---------------------------------------------------------------------------
114 ! Set up number of channels, width, height
115 !---------------------------------------------------------------------------
116 this%num_channels = this%input_shape(this%input_rank)
117 if(allocated(this%output_shape)) deallocate(this%output_shape)
118 allocate( this%output_shape(this%input_rank) )
119 this%output_shape(this%input_rank) = this%input_shape(this%input_rank)
120 this%output_shape(:this%input_rank-1) = &
121 floor( &
122 ( &
123 this%input_shape(:this%input_rank-1) - this%pool &
124 ) / real(this%strd) &
125 ) + 1
126
127
128 !---------------------------------------------------------------------------
129 ! Allocate arrays
130 !---------------------------------------------------------------------------
131 if(this%use_graph_input)then
132 call stop_program( &
133 "Graph input not supported for pooling layer" &
134 )
135 return
136 end if
137 if(allocated(this%output)) deallocate(this%output)
138 allocate( this%output(1,1) )
139
140 end subroutine init_pool
141 !###############################################################################
142
143
144 !###############################################################################
145 module subroutine init_conv(this, input_shape, verbose)
146 !! Initialise convolutional layer
147 use athena__initialiser, only: initialiser_setup
148 use athena__misc_types, only: base_init_type
149 implicit none
150
151 ! Arguments
152 class(conv_layer_type), intent(inout) :: this
153 !! Instance of the layer
154 integer, dimension(:), intent(in) :: input_shape
155 !! Input shape
156 integer, optional, intent(in) :: verbose
157 !! Verbosity level
158
159 ! Local variables
160 integer, dimension(this%input_rank-1) :: pad_shape
161 !! Per-side padding along each spatial dimension
162 integer :: verbose_
163 !! Verbosity level
164
165 !---------------------------------------------------------------------------
166 ! initialise optional arguments
167 !---------------------------------------------------------------------------
168 verbose_ = 0; if(present(verbose)) verbose_ = verbose
169
170
171 !---------------------------------------------------------------------------
172 ! initialise input shape
173 !---------------------------------------------------------------------------
174 if(.not.allocated(this%input_shape)) call this%set_shape(input_shape)
175
176
177 !---------------------------------------------------------------------------
178 ! initialise padding layer, if allocated
179 !---------------------------------------------------------------------------
180 if(allocated(this%pad_layer))then
181 call this%pad_layer%init(this%input_shape, verbose_)
182 pad_shape = this%pad_layer%pad ! per-side padding; doubled in the output-shape formula below
183 else
184 pad_shape = 0
185 end if
186
187
188 !---------------------------------------------------------------------------
189 ! allocate output, activation, bias, and weight shapes
190 !---------------------------------------------------------------------------
191 ! NOTE: the input shape passed in does not include the padding width;
192 ! padding is handled automatically by this routine, so provide the
193 ! original, unpadded input data shape
194 this%num_channels = this%input_shape(this%input_rank)
195 if(allocated(this%output_shape)) deallocate(this%output_shape)
196 allocate( this%output_shape(this%input_rank) )
197 this%output_shape(this%input_rank) = this%num_filters
198 this%output_shape(:this%input_rank-1) = floor( &
199 ( &
200 this%input_shape(:this%input_rank-1) + 2 * pad_shape - this%knl &
201 ) / real(this%stp) &
202 ) + 1
203 this%num_params = this%get_num_params()
204 allocate(this%weight_shape(this%input_rank + 1,1))
205 this%weight_shape(:,1) = [ this%knl, this%num_channels, this%num_filters ]
206 this%bias_shape = [this%num_filters]
207
208 if(allocated(this%params)) deallocate(this%params)
209 allocate(this%params(2))
210 call this%params(1)%allocate([this%weight_shape(:,1), 1])
211 call this%params(1)%set_requires_grad(.true.)
212 this%params(1)%is_sample_dependent = .false.
213 call this%params(2)%allocate([this%bias_shape, 1])
214 call this%params(2)%set_requires_grad(.true.)
215 this%params(2)%is_sample_dependent = .false.
216
217
218 !---------------------------------------------------------------------------
219 ! initialise weights (kernels)
220 !---------------------------------------------------------------------------
221 call this%kernel_init%initialise( &
222 this%params(1)%val(:,1), &
223 fan_in = product(this%knl)+1, fan_out = 1, &
224 spacing = [ this%knl, this%num_channels, this%num_filters ] &
225 )
226
227 ! initialise biases
228 !---------------------------------------------------------------------------
229 call this%bias_init%initialise( &
230 this%params(2)%val(:,1), &
231 fan_in = product(this%knl)+1, fan_out = 1 &
232 )
233
234
235 !---------------------------------------------------------------------------
236 ! Allocate arrays
237 !---------------------------------------------------------------------------
238 if(this%use_graph_input)then
239 call stop_program( &
240 "Graph input not supported for convolutional layer" &
241 )
242 return
243 end if
244 if(allocated(this%output)) deallocate(this%output)
245 allocate( this%output(1,1) )
246
247 end subroutine init_conv
248 !###############################################################################
249
250
251 !###############################################################################
252 module subroutine init_batch(this, input_shape, verbose)
253 !! Initialise batch normalisation layer
254 use athena__initialiser, only: initialiser_setup
255 use athena__misc_types, only: base_init_type
256 implicit none
257
258 ! Arguments
259 class(batch_layer_type), intent(inout) :: this
260 !! Instance of the layer
261 integer, dimension(:), intent(in) :: input_shape
262 !! Input shape
263 integer, optional, intent(in) :: verbose
264 !! Verbosity level
265 ! Local variables
266 integer :: verbose_
267 !! Verbosity level
268
269 !---------------------------------------------------------------------------
270 ! initialise optional arguments
271 !---------------------------------------------------------------------------
272 verbose_ = 0; if(present(verbose)) verbose_ = verbose
273
274
275 !---------------------------------------------------------------------------
276 ! initialise input shape
277 !---------------------------------------------------------------------------
278 if(.not.allocated(this%input_shape)) call this%set_shape(input_shape)
279
280
281 !---------------------------------------------------------------------------
282 ! set up number of channels, width, height
283 !---------------------------------------------------------------------------
284 if(allocated(this%output_shape)) deallocate(this%output_shape)
285 allocate(this%output_shape(max(this%input_rank, 2))) ! at least 2: the rank-1 branch below sets output_shape(2)
286 if(size(this%input_shape).eq.1)then
287 this%output_shape(1) = this%input_shape(1)
288 this%output_shape(2) = 1
289 else
290 this%output_shape = this%input_shape
291 end if
292 this%num_channels = this%input_shape(this%input_rank)
293 this%num_params = this%get_num_params()
294 allocate(this%params(1))
295 call this%params(1)%allocate([2 * this%num_channels, 1])
296 call this%params(1)%set_requires_grad(.true.)
297 allocate(this%weight_shape(1,1))
298 this%weight_shape(:,1) = [ this%num_channels ]
299 this%bias_shape = [this%num_channels]
300
301
302 !---------------------------------------------------------------------------
303 ! allocate mean and variance
304 !---------------------------------------------------------------------------
305 allocate(this%mean(this%num_channels), source=0._real32)
306 allocate(this%variance, source=this%mean)
307
308
309 !---------------------------------------------------------------------------
310 ! initialise gamma
311 !---------------------------------------------------------------------------
312 call this%kernel_init%initialise(this%params(1)%val(1:this%num_channels,1), &
313 fan_in =this%num_channels, &
314 fan_out=this%num_channels)
315
316 ! initialise beta
317 !---------------------------------------------------------------------------
318 call this%bias_init%initialise(this%params(1)%val(this%num_channels+1:,1), &
319 fan_in =this%num_channels, &
320 fan_out=this%num_channels)
321
322
323 !---------------------------------------------------------------------------
324 ! initialise moving mean
325 !---------------------------------------------------------------------------
326 call this%moving_mean_init%initialise(this%mean, &
327 fan_in =this%num_channels, &
328 fan_out=this%num_channels)
329
330 ! initialise moving variance
331 !---------------------------------------------------------------------------
332 call this%moving_variance_init%initialise(this%variance, &
333 fan_in =this%num_channels, &
334 fan_out=this%num_channels)
335
336
337 !---------------------------------------------------------------------------
338 ! Allocate arrays
339 !---------------------------------------------------------------------------
340 if(this%use_graph_input)then
341 call stop_program( &
342 "Graph input not supported for batch normalisation layer" &
343 )
344 return
345 end if
346 if(allocated(this%output)) deallocate(this%output)
347 allocate( batchnorm_array_type :: this%output(1,1) )
348
349 end subroutine init_batch
350 !###############################################################################
351
352 end submodule athena__base_layer_submodule_init
353
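
The bounds arithmetic in init_pad maps an unpadded extent n onto a padded
destination extent n + 2*p. A minimal standalone sketch of that arithmetic,
using illustrative values only (this program is an editorial sketch, not part
of the ATHENA sources):

   program pad_bounds_example
      implicit none
      integer :: n, p
      integer :: orig_bound(2), dest_bound(2)
      n = 8   ! unpadded extent along one spatial dimension
      p = 2   ! per-side padding width
      ! same bounds arithmetic as init_pad: the original data spans 1..n,
      ! the padded destination spans 1..n+2p
      orig_bound = [ 1, n ]
      dest_bound = [ 1, n + 2*p ]
      print *, "orig:", orig_bound, "dest:", dest_bound   ! orig: 1 8, dest: 1 12
   end program pad_bounds_example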
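
init_pool and init_conv both derive their spatial output extents from the
standard striding formula out = floor((n + 2*p - k)/s) + 1, with p = 0 for the
pooling layer. A worked check of that arithmetic (all values illustrative):

   program output_shape_check
      implicit none
      integer :: n, k, s, p, out_len
      n = 32   ! input extent along one spatial dimension
      k = 3    ! kernel (or pool window) extent
      s = 2    ! stride
      p = 1    ! per-side padding (zero for the pooling layer)
      ! same arithmetic as the output_shape assignments above
      out_len = floor( real(n + 2*p - k) / real(s) ) + 1
      print *, "output extent:", out_len   ! prints 16: floor(31/2) + 1
   end program output_shape_check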
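
init_conv records its kernel shape as [ knl, num_channels, num_filters ] and
stores the weights as a flat parameter vector. The sketch below shows how a
flat vector of that size maps back onto a kernel array; the reshape-based view
and all extents are assumptions for illustration, not taken from the listing:

   program conv_weight_layout
      implicit none
      integer, parameter :: kw = 3, kh = 3   ! kernel extents (illustrative)
      integer, parameter :: nc = 2, nf = 4   ! input channels, filters
      real :: flat(kw*kh*nc*nf)
      real :: kernel(kw, kh, nc, nf)
      ! weight_shape(:,1) = [ knl, num_channels, num_filters ] in init_conv;
      ! a flat parameter vector of the matching length reshapes to the kernel
      call random_number(flat)
      kernel = reshape(flat, [kw, kh, nc, nf])
      print *, "kernel elements:", size(kernel)   ! kw*kh*nc*nf = 72
   end program conv_weight_layout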
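
init_batch packs gamma and beta into a single parameter vector of length
2*num_channels, gamma in the first half and beta in the second. A small sketch
of that slicing; the channel count and the gamma = 1, beta = 0 defaults are
assumptions for illustration, not taken from the listing:

   program batchnorm_param_layout
      implicit none
      integer, parameter :: c = 4   ! number of channels (illustrative)
      real :: params(2*c)
      ! same layout as params(1)%val in init_batch above:
      ! gamma occupies entries 1..c, beta occupies entries c+1..2c
      params(1:c)     = 1.0   ! gamma (assumed default of 1)
      params(c+1:2*c) = 0.0   ! beta  (assumed default of 0)
      print *, "gamma:", params(1:c)
      print *, "beta :", params(c+1:)
   end program batchnorm_param_layout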