GCC Code Coverage Report


Directory: src/athena/
File: athena_dropout_layer.f90
Date: 2025-12-10 07:37:07
Exec Total Coverage
Lines: 0 0 100.0%
Functions: 0 0 -%
Branches: 0 0 -%

Line Branch Exec Source
1 module athena__dropout_layer
2 !! Module containing implementation of a dropout layer
3 !!
4 !! This module implements dropout regularisation, randomly zeroing elements
5 !! during training to prevent overfitting and co-adaptation of neurons.
6 !!
7 !! Mathematical operation (training):
!! \[
!! y_i = \begin{cases} 0 & \text{if } r_i < p \\ x_i / (1-p) & \text{otherwise} \end{cases}
!! \]
12 !! where:
13 !! - \( y_i \) is the output
14 !! - \( x_i \) is the input
15 !! - \( p \) is the dropout probability (rate)
16 !! - \( r_i \) is a random variable uniformly distributed in [0,1]
17 !!
!! Scaling by \( 1/(1-p) \) maintains the expected value: \( E[y_i] = x_i \)
19 !!
20 !! Inference: acts as identity (no dropout applied)
21 !! \[
22 !! y_i = x_i
23 !! \]
24 !!
25 !! Benefits: Prevents overfitting, ensemble effect, forces redundancy
26 !! Typical p values: 0.2-0.5 (higher dropout for larger networks)
27 !! Reference: Srivastava et al. (2014), JMLR
28 use coreutils, only: real32, stop_program
29 use athena__base_layer, only: drop_layer_type, base_layer_type
30 use diffstruc, only: array_type, operator(*)
31 use athena__misc_types, only: &
32 onnx_node_type, onnx_initialiser_type, onnx_tensor_type
33 use athena__diffstruc_extd, only: merge_over_channels
34 implicit none
35
36
37 private
38
39 public :: dropout_layer_type
40 public :: read_dropout_layer, create_from_onnx_dropout_layer
41
42
type, extends(drop_layer_type) :: dropout_layer_type
   !! Dropout layer: randomly zeroes elements during training (inverted
   !! dropout, scaling survivors by 1/(1-rate)); identity at inference.
   integer :: idx = 0
   !! Running index of the current sample within the batch
   !! (doesn't need to be accurate; reset to 0 on mask regeneration)
   integer :: num_masks
   !! Number of unique masks = number of samples in batch
   logical, allocatable, dimension(:,:) :: mask
   !! Dropout mask; element is .true. when the input element is KEPT
   !! (shape: input size x num_masks)
contains
   procedure, pass(this) :: set_hyperparams => set_hyperparams_dropout
   !! Set hyperparameters (rate, num_masks) for dropout layer
   procedure, pass(this) :: init => init_dropout
   !! Initialise dropout layer (shapes, mask allocation and generation)
   procedure, pass(this) :: print_to_unit => print_to_unit_dropout
   !! Print dropout layer configuration to a file unit
   procedure, pass(this) :: read => read_dropout
   !! Read dropout layer configuration from file

   procedure, pass(this) :: forward => forward_dropout
   !! Forward propagation derived type handler

   procedure, pass(this) :: generate_mask => generate_dropout_mask
   !! Generate (re-draw) the random dropout mask
end type dropout_layer_type
67
interface dropout_layer_type
   !! Structure constructor for the dropout layer
   module function layer_setup( &
        rate, num_masks, &
        input_shape) result(layer)
      !! Set up the dropout layer; if input_shape is supplied the layer
      !! is fully initialised, otherwise init must be called later
      integer, intent(in) :: num_masks
      !! Number of unique masks (one per sample in the batch)
      real(real32), intent(in) :: rate
      !! Drop rate (probability of zeroing an element)
      integer, dimension(:), optional, intent(in) :: input_shape
      !! Input shape
      type(dropout_layer_type) :: layer
      !! Instance of the dropout layer
   end function layer_setup
end interface dropout_layer_type
84
85
86
87 contains
88
89 !###############################################################################
module function layer_setup( &
     rate, num_masks, &
     input_shape) result(layer)
   !! Construct a dropout layer from its hyperparameters.
   !!
   !! Shape-dependent initialisation is deferred unless input_shape is
   !! supplied here.
   implicit none

   ! Arguments
   integer, intent(in) :: num_masks
   !! Number of unique masks (one per sample in the batch)
   real(real32), intent(in) :: rate
   !! Drop rate (probability of zeroing an element)
   integer, dimension(:), optional, intent(in) :: input_shape
   !! Input shape
   type(dropout_layer_type) :: layer
   !! Newly constructed dropout layer


   ! Record hyperparameters on the new layer
   call layer%set_hyperparams(rate, num_masks)

   ! Initialise shapes and mask now only if the input shape is known
   if(present(input_shape))then
      call layer%init(input_shape=input_shape)
   end if

end function layer_setup
120 !###############################################################################
121
122
123 !###############################################################################
pure subroutine set_hyperparams_dropout(this, rate, num_masks)
   !! Store the hyperparameters of the dropout layer.
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(inout) :: this
   !! Instance of the dropout layer
   real(real32), intent(in) :: rate
   !! Drop rate (probability of zeroing an element)
   integer, intent(in) :: num_masks
   !! Number of unique masks (one per sample in the batch)

   ! Identification metadata
   this%type = "drop"
   this%name = "dropout"

   ! Dropout operates on rank-1 (flattened) data
   this%output_rank = 1
   this%input_rank = 1

   ! Layer-specific hyperparameters
   this%rate = rate
   this%num_masks = num_masks

end subroutine set_hyperparams_dropout
145 !###############################################################################
146
147
148 !###############################################################################
subroutine init_dropout(this, input_shape, verbose)
   !! Initialise dropout layer: set input/output shapes, allocate and
   !! generate the dropout mask, and allocate the output container.
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(inout) :: this
   !! Instance of the dropout layer
   integer, dimension(:), intent(in) :: input_shape
   !! Input shape
   integer, optional, intent(in) :: verbose
   !! Verbosity level

   ! Local variables
   integer :: verbose_
   !! Verbosity level (local copy)


   !---------------------------------------------------------------------------
   ! Initialise optional arguments
   !---------------------------------------------------------------------------
   ! BUG FIX: initialising verbose_ at its declaration implied the SAVE
   ! attribute, retaining a stale value across calls; assign instead
   verbose_ = 0
   if(present(verbose)) verbose_ = verbose


   !---------------------------------------------------------------------------
   ! Initialise input shape
   !---------------------------------------------------------------------------
   if(.not.allocated(this%input_shape)) call this%set_shape(input_shape)


   !---------------------------------------------------------------------------
   ! Set up output shape (dropout preserves the input shape)
   !---------------------------------------------------------------------------
   ! BUG FIX: previously hard-coded allocate(this%output_shape(2)), which
   ! does not conform with the rank-1 input shape (input_rank = 1)
   allocate(this%output_shape(size(this%input_shape)))
   this%output_shape = this%input_shape


   !---------------------------------------------------------------------------
   ! Allocate mask (one column per sample in the batch)
   !---------------------------------------------------------------------------
   allocate(this%mask(this%input_shape(1), this%num_masks), source=.true.)


   !---------------------------------------------------------------------------
   ! Generate mask
   !---------------------------------------------------------------------------
   call this%generate_mask()


   !---------------------------------------------------------------------------
   ! Reject unsupported graph input and allocate the output container
   !---------------------------------------------------------------------------
   if(this%use_graph_input)then
      call stop_program( &
           "Graph input not supported for dropout layer" &
      )
      return
   end if
   if(allocated(this%output)) deallocate(this%output)
   allocate( this%output(1,1) )

end subroutine init_dropout
210 !###############################################################################
211
212
213 !###############################################################################
subroutine generate_dropout_mask(this)
   !! Draw a fresh random dropout mask and reset the sample index.
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(inout) :: this
   !! Instance of the dropout layer

   ! Local variables
   real(real32), allocatable, dimension(:,:) :: random_draw
   !! Uniform random draws, one per mask element

   ! Draw uniform random numbers in [0,1) and keep only the elements
   ! whose draw exceeds the drop rate
   !---------------------------------------------------------------------------
   allocate(random_draw(size(this%mask,1), size(this%mask,2)))
   call random_number(random_draw)
   this%mask = random_draw > this%rate

   ! Restart the running sample index for the new set of masks
   this%idx = 0

end subroutine generate_dropout_mask
235 !###############################################################################
236
237
238 !##############################################################################!
239 ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * !
240 !##############################################################################!
241
242
243 !###############################################################################
subroutine print_to_unit_dropout(this, unit)
   !! Print dropout layer configuration to the given file unit.
   !!
   !! Output matches the tag format expected by read_dropout.
   ! FIX: removed unused "use coreutils, only: to_upper" import
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(in) :: this
   !! Instance of the dropout layer
   integer, intent(in) :: unit
   !! File unit


   ! Write initial parameters
   !---------------------------------------------------------------------------
   write(unit,'(3X,"INPUT_SHAPE = ",3(1X,I0))') this%input_shape
   write(unit,'(3X,"RATE = ",F0.9)') this%rate
   write(unit,'(3X,"NUM_MASKS = ",I0)') this%num_masks

end subroutine print_to_unit_dropout
263 !###############################################################################
264
265
266 !###############################################################################
subroutine read_dropout(this, unit, verbose)
   !! Read dropout layer configuration from a layer card in a save file.
   !!
   !! Recognised tags: INPUT_SHAPE, RATE, NUM_MASKS. The card must be
   !! terminated by "END <name>"; anything unrecognised aborts the read.
   use athena__tools_infile, only: assign_val, assign_vec
   use coreutils, only: to_lower, to_upper
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(inout) :: this
   !! Instance of the dropout layer
   integer, intent(in) :: unit
   !! File unit
   integer, optional, intent(in) :: verbose
   !! Verbosity level

   ! Local variables
   integer :: verbose_
   !! Verbosity level (local copy)
   integer :: stat
   !! File status
   integer :: itmp1
   !! Temporary integer
   integer :: num_masks
   !! Number of unique masks
   real(real32) :: rate
   !! Drop rate
   integer, dimension(1) :: input_shape
   !! Input shape
   character(256) :: buffer, tag, err_msg
   !! Buffer, tag, and error message


   ! Initialise optional arguments
   !---------------------------------------------------------------------------
   ! BUG FIX: initialising verbose_ at its declaration implied the SAVE
   ! attribute, retaining a stale value across calls; assign instead
   verbose_ = 0
   if(present(verbose)) verbose_ = verbose


   ! Loop over tags in layer card
   !---------------------------------------------------------------------------
   tag_loop: do

      ! Check for end of file
      !------------------------------------------------------------------------
      read(unit,'(A)',iostat=stat) buffer
      if(stat.ne.0)then
         write(err_msg,'("file encountered error (EoF?) before END ",A)') &
              to_upper(this%name)
         call stop_program(err_msg)
         return
      end if
      if(trim(adjustl(buffer)).eq."") cycle tag_loop

      ! Check for end of layer card
      !------------------------------------------------------------------------
      if(trim(adjustl(buffer)).eq."END "//to_upper(trim(this%name)))then
         backspace(unit)
         exit tag_loop
      end if

      ! Extract the tag name (text before "=", if any)
      tag=trim(adjustl(buffer))
      if(scan(buffer,"=").ne.0) tag=trim(tag(:scan(tag,"=")-1))

      ! Read parameters from save file
      !------------------------------------------------------------------------
      select case(trim(tag))
      case("INPUT_SHAPE")
         call assign_vec(buffer, input_shape, itmp1)
      case("RATE")
         call assign_val(buffer, rate, itmp1)
      case("NUM_MASKS")
         call assign_val(buffer, num_masks, itmp1)
      case default
         ! Don't look for "e" due to scientific notation of numbers
         ! ... i.e. exponent (E+00)
         if(scan(to_lower(trim(adjustl(buffer))),&
              'abcdfghijklmnopqrstuvwxyz').eq.0)then
            cycle tag_loop
         elseif(tag(:3).eq.'END')then
            cycle tag_loop
         end if
         write(err_msg,'("Unrecognised line in input file: ",A)') &
              trim(adjustl(buffer))
         call stop_program(err_msg)
         return
      end select
   end do tag_loop


   ! Set hyperparameters and initialise layer
   !---------------------------------------------------------------------------
   call this%set_hyperparams(rate = rate, num_masks = num_masks)
   call this%init(input_shape = input_shape)


   ! Check for end of layer card
   !---------------------------------------------------------------------------
   read(unit,'(A)') buffer
   if(trim(adjustl(buffer)).ne."END "//to_upper(trim(this%name)))then
      write(0,*) trim(adjustl(buffer))
      write(err_msg,'("END ",A," not where expected")') to_upper(this%name)
      call stop_program(err_msg)
      return
   end if

end subroutine read_dropout
371 !###############################################################################
372
373
374 !###############################################################################
function read_dropout_layer(unit, verbose) result(layer)
   !! Read dropout layer from file and return it as a polymorphic layer.
   implicit none

   ! Arguments
   integer, intent(in) :: unit
   !! File unit
   integer, optional, intent(in) :: verbose
   !! Verbosity level
   class(base_layer_type), allocatable :: layer
   !! Instance of the base layer

   ! Local variables
   integer :: verbose_
   !! Verbosity level (local copy)

   ! BUG FIX: initialising verbose_ at its declaration implied the SAVE
   ! attribute, retaining a stale value across calls; assign instead
   verbose_ = 0
   if(present(verbose)) verbose_ = verbose

   ! Allocate a placeholder dropout layer, then populate it from the file
   allocate(layer, source=dropout_layer_type(rate=0._real32, num_masks=0))
   call layer%read(unit, verbose=verbose_)

end function read_dropout_layer
396 !###############################################################################
397
398
399 !###############################################################################
subroutine build_from_onnx_dropout( &
     this, node, initialisers, value_info, verbose &
)
   !! Configure the dropout layer from ONNX node attributes.
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(inout) :: this
   !! Instance of the dropout layer
   type(onnx_node_type), intent(in) :: node
   !! ONNX node information
   type(onnx_initialiser_type), dimension(:), intent(in) :: initialisers
   !! ONNX initialiser information (not used by dropout)
   type(onnx_tensor_type), dimension(:), intent(in) :: value_info
   !! ONNX value info (not used by dropout)
   integer, intent(in) :: verbose
   !! Verbosity level

   ! Local variables
   integer :: iattr
   !! Attribute loop counter
   real(real32) :: rate
   !! Dropout rate
   integer :: num_masks
   !! Number of masks (batch size)
   character(256) :: val
   !! Attribute value buffer

   ! Defaults when attributes are absent
   rate = 0.5_real32
   num_masks = 1

   ! Walk the node attributes, honouring only those dropout understands
   ! and warning about anything unexpected
   do iattr = 1, size(node%attributes)
      val = node%attributes(iattr)%val
      select case(trim(adjustl(node%attributes(iattr)%name)))
      case("ratio")
         read(val,*) rate
      case default
         write(0,*) "WARNING: Unrecognised attribute in ONNX DROPOUT layer: ", &
              trim(adjustl(node%attributes(iattr)%name))
      end select
   end do

   ! Dropout has no learnable parameters, so initialisers are unexpected
   if(size(initialisers).ne.0)then
      write(0,*) "WARNING: initialisers not used for ONNX DROPOUT layer"
   end if

   call this%set_hyperparams( &
        rate = rate, &
        num_masks = num_masks &
   )

end subroutine build_from_onnx_dropout
456 !###############################################################################
457
458
459 !###############################################################################
function create_from_onnx_dropout_layer( &
     node, initialisers, value_info, verbose &
) result(layer)
   !! Build a dropout layer from ONNX node attributes and return it as a
   !! polymorphic layer.
   implicit none

   ! Arguments
   type(onnx_node_type), intent(in) :: node
   !! ONNX node information
   type(onnx_initialiser_type), dimension(:), intent(in) :: initialisers
   !! ONNX initialiser information
   type(onnx_tensor_type), dimension(:), intent(in) :: value_info
   !! ONNX value info information
   integer, optional, intent(in) :: verbose
   !! Verbosity level
   class(base_layer_type), allocatable :: layer
   !! Instance of the dropout layer

   ! Local variables
   integer :: verbose_
   !! Verbosity level (local copy)

   ! BUG FIX: initialising verbose_ at its declaration implied the SAVE
   ! attribute, retaining a stale value across calls; assign instead
   verbose_ = 0
   if(present(verbose)) verbose_ = verbose

   ! Allocate a placeholder dropout layer, then configure it from ONNX
   allocate(layer, source=dropout_layer_type(rate=0._real32, num_masks=0))
   call layer%build_from_onnx(node, initialisers, value_info, verbose=verbose_)

end function create_from_onnx_dropout_layer
488 !###############################################################################
489
490
491 !##############################################################################!
492 ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * !
493 !##############################################################################!
494
495
496 !###############################################################################
subroutine forward_dropout(this, input)
   !! Forward propagation.
   !!
   !! Training: drops masked elements and scales survivors by 1/(1-rate)
   !! (inverted dropout), preserving the expected value.
   !! Inference: acts as the identity, as stated in the module docs.
   implicit none

   ! Arguments
   class(dropout_layer_type), intent(inout) :: this
   !! Instance of the dropout layer
   class(array_type), dimension(:,:), intent(in) :: input
   !! Input values

   ! Local variables
   real(real32) :: scale
   !! Scaling factor applied to the output
   type(array_type), pointer :: ptr
   !! Pointer to the (temporary) result array


   select case(this%inference)
   case(.true.)
      ! BUG FIX: previously scaled by (1-rate) at inference, which
      ! double-compensates since training already uses inverted scaling
      ! 1/(1-rate); inference must be the identity. Multiply by one
      ! (rather than pointing at the input directly) so ptr remains a
      ! temporary suitable for assign_and_deallocate_source.
      ptr => input(1,1) * 1._real32
   case default
      ! Perform the drop operation, rescaling surviving elements so the
      ! expected value of the output matches the input
      this%idx = this%idx + 1

      scale = 1._real32 / (1._real32 - this%rate)
      ! NOTE(review): the full mask (all columns) is passed here; idx is
      ! incremented but not visibly used to select a column — confirm
      ! merge_over_channels handles per-sample mask selection
      ptr => merge_over_channels( input(1,1), 0._real32, this%mask) * scale
   end select
   call this%output(1,1)%assign_and_deallocate_source(ptr)
   this%output(1,1)%is_temporary = .false.

end subroutine forward_dropout
530 !###############################################################################
531
532 end module athena__dropout_layer
533