GCC Code Coverage Report


Directory: src/athena/
File: athena_batchnorm1d_layer.f90
Date: 2025-12-10 07:37:07
Exec Total Coverage
Lines: 0 0 100.0%
Functions: 0 0 -%
Branches: 0 0 -%

Line Branch Exec Source
1 module athena__batchnorm1d_layer
2 !! Module containing implementation of 0D and 1D batch normalisation layers
3 !!
4 !! This module implements batch normalisation for 0D (dense) and 1D data,
5 !! normalising each channel's activations across the batch dimension.
6 !!
7 !! Mathematical operation (training):
8 !! \[ \mu_\mathcal{B} = \frac{1}{m}\sum_{i=1}^{m} x_i \]
9 !! \[ \sigma^2_\mathcal{B} = \frac{1}{m}\sum_{i=1}^{m} (x_i - \mu_\mathcal{B})^2 \]
10 !! \[ \hat{x}_i = \frac{x_i - \mu_\mathcal{B}}{\sqrt{\sigma^2_\mathcal{B} + \epsilon}} \]
11 !! \[ y_i = \gamma \hat{x}_i + \beta \]
12 !!
13 !! where \(\gamma, \beta\) are learnable parameters and \(\epsilon\) is a small stability constant
14 !!
15 !! Inference: uses running statistics
16 !! \(\mu_{\text{running}}, \sigma^2_{\text{running}}\) from training
17 !!
18 !! Benefits: Reduces internal covariate shift, enables higher learning rates,
19 !! acts as a regulariser, and reduces dependence on initialisation
20 !! Reference: Ioffe & Szegedy (2015), ICML
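!!
!! A worked example (illustrative): for a single-channel batch
!! \(x = (1, 2, 3)\) with \(\gamma = 1\), \(\beta = 0\) and negligible
!! \(\epsilon\): \(\mu_\mathcal{B} = 2\), \(\sigma^2_\mathcal{B} = 2/3\),
!! giving \(y \approx (-1.22,\ 0,\ 1.22)\)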
21 use coreutils, only: real32, stop_program, print_warning
22 use athena__base_layer, only: batch_layer_type, base_layer_type
23 use athena__misc_types, only: base_init_type, &
24 onnx_node_type, onnx_initialiser_type, onnx_tensor_type
25 use diffstruc, only: array_type
26 use athena__diffstruc_extd, only: batchnorm_array_type, &
27 batchnorm, batchnorm_inference
28 implicit none
29
30
31 private
32
33 public :: batchnorm1d_layer_type
34 public :: read_batchnorm1d_layer
35
36
37 type, extends(batch_layer_type) :: batchnorm1d_layer_type
38 !! Type for a 0D or 1D batch normalisation layer with overridden type-bound procedures
39 contains
40 procedure, pass(this) :: set_hyperparams => set_hyperparams_batchnorm1d
41 !! Set hyperparameters for 1D batch normalisation layer
42 procedure, pass(this) :: read => read_batchnorm1d
43 !! Read 1D batch normalisation layer from file
44
45 procedure, pass(this) :: forward => forward_batchnorm1d
46 !! Forward propagation derived type handler
47 procedure, pass(this) :: build_from_onnx => build_from_onnx_batchnorm1d
!! Build the layer from ONNX node data
48 final :: finalise_batchnorm1d
49 !! Finalise 1D batch normalisation layer
50 end type batchnorm1d_layer_type
51
52
53 interface batchnorm1d_layer_type
54 !! Interface for setting up the 1D batch normalisation layer
55 module function layer_setup( &
56 input_shape, &
57 num_channels, num_inputs, &
58 momentum, epsilon, &
59 gamma_init_mean, gamma_init_std, &
60 beta_init_mean, beta_init_std, &
61 gamma_initialiser, beta_initialiser, &
62 moving_mean_initialiser, moving_variance_initialiser, &
63 verbose &
64 ) result(layer)
65 !! Set up the 1D batch normalisation layer
66 integer, dimension(:), optional, intent(in) :: input_shape
67 !! Input shape
68 integer, optional, intent(in) :: num_channels, num_inputs
69 !! Number of channels and inputs
70 real(real32), optional, intent(in) :: momentum, epsilon
71 !! Momentum and epsilon
72 real(real32), optional, intent(in) :: gamma_init_mean, gamma_init_std
73 !! Gamma initialisation mean and standard deviation
74 real(real32), optional, intent(in) :: beta_init_mean, beta_init_std
75 !! Beta initialisation mean and standard deviation
76 class(*), optional, intent(in) :: &
77 gamma_initialiser, beta_initialiser, &
78 moving_mean_initialiser, moving_variance_initialiser
79 !! Initialisers
80 integer, optional, intent(in) :: verbose
81 !! Verbosity level
82 type(batchnorm1d_layer_type) :: layer
83 !! Instance of the 1D batch normalisation layer
84 end function layer_setup
85 end interface batchnorm1d_layer_type
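! Example constructor usage (illustrative; the argument values are
! placeholders):
!    layer = batchnorm1d_layer_type( &
!         num_channels = 32, &
!         momentum = 0.99_real32, &
!         epsilon = 1.E-5_real32 &
!    )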
86
87
88
89 contains
90
91 !###############################################################################
92 subroutine finalise_batchnorm1d(this)
93 !! Finalise 1D batch normalisation layer
94 implicit none
95
96 ! Arguments
97 type(batchnorm1d_layer_type), intent(inout) :: this
98 !! Instance of the 1D batch normalisation layer
99
100 if(allocated(this%mean)) deallocate(this%mean)
101 if(allocated(this%variance)) deallocate(this%variance)
102 if(allocated(this%input_shape)) deallocate(this%input_shape)
103 if(allocated(this%output)) deallocate(this%output)
104
105 end subroutine finalise_batchnorm1d
106 !###############################################################################
107
108
109 !##############################################################################!
110 ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * !
111 !##############################################################################!
112
113
114 !###############################################################################
115 module function layer_setup( &
116 input_shape, &
117 num_channels, num_inputs, &
118 momentum, epsilon, &
119 gamma_init_mean, gamma_init_std, &
120 beta_init_mean, beta_init_std, &
121 gamma_initialiser, beta_initialiser, &
122 moving_mean_initialiser, moving_variance_initialiser, &
123 verbose &
124 ) result(layer)
125 !! Set up the 1D batch normalisation layer
126 use athena__initialiser, only: initialiser_setup
127 implicit none
128
129 ! Arguments
130 integer, dimension(:), optional, intent(in) :: input_shape
131 !! Input shape
132 integer, optional, intent(in) :: num_channels, num_inputs
133 !! Number of channels and inputs
134 real(real32), optional, intent(in) :: momentum, epsilon
135 !! Momentum and epsilon
136 real(real32), optional, intent(in) :: gamma_init_mean, gamma_init_std
137 !! Gamma initialisation mean and standard deviation
138 real(real32), optional, intent(in) :: beta_init_mean, beta_init_std
139 !! Beta initialisation mean and standard deviation
140 class(*), optional, intent(in) :: &
141 gamma_initialiser, beta_initialiser, &
142 moving_mean_initialiser, moving_variance_initialiser
143 !! Initialisers
144 integer, optional, intent(in) :: verbose
145 !! Verbosity level
146
147 type(batchnorm1d_layer_type) :: layer
148 !! Instance of the 1D batch normalisation layer
149
150 ! Local variables
151 integer :: verbose_
152 !! Verbosity level
153 character(256) :: err_msg
154 !! Error message
155 class(base_init_type), allocatable :: &
156 gamma_initialiser_, beta_initialiser_, &
157 moving_mean_initialiser_, moving_variance_initialiser_
158 !! Initialisers
159
160
161 verbose_ = 0; if(present(verbose)) verbose_ = verbose
162
163 !---------------------------------------------------------------------------
164 ! Set up momentum and epsilon
165 !---------------------------------------------------------------------------
166 if(present(momentum))then
167 layer%momentum = momentum
168 else
169 layer%momentum = 0._real32
170 end if
171 if(present(epsilon))then
172 layer%epsilon = epsilon
173 else
174 layer%epsilon = 1.E-5_real32
175 end if
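! NOTE: momentum defaults to 0, so by default the running statistics
! track only the most recent batch (assuming the conventional update
! running = momentum*running + (1-momentum)*batch)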
176
177
178 !---------------------------------------------------------------------------
179 ! Set up initialiser mean and standard deviations
180 !---------------------------------------------------------------------------
181 if(present(gamma_init_mean)) layer%gamma_init_mean = gamma_init_mean
182 if(present(gamma_init_std)) layer%gamma_init_std = gamma_init_std
183 if(present(beta_init_mean)) layer%beta_init_mean = beta_init_mean
184 if(present(beta_init_std)) layer%beta_init_std = beta_init_std
185
186
187 !---------------------------------------------------------------------------
188 ! Define gamma and beta initialisers
189 !---------------------------------------------------------------------------
190 if(present(gamma_initialiser))then
191 gamma_initialiser_ = initialiser_setup(gamma_initialiser)
192 end if
193 if(present(beta_initialiser))then
194 beta_initialiser_ = initialiser_setup(beta_initialiser)
195 end if
196 if(present(moving_mean_initialiser))then
197 moving_mean_initialiser_ = initialiser_setup(moving_mean_initialiser)
198 end if
199 if(present(moving_variance_initialiser))then
200 moving_variance_initialiser_ = initialiser_setup(moving_variance_initialiser)
201 end if
202
203
204 !---------------------------------------------------------------------------
205 ! Set hyperparameters
206 !---------------------------------------------------------------------------
207 call layer%set_hyperparams( &
208 momentum = layer%momentum, epsilon = layer%epsilon, &
209 gamma_init_mean = layer%gamma_init_mean, &
210 gamma_init_std = layer%gamma_init_std, &
211 beta_init_mean = layer%beta_init_mean, &
212 beta_init_std = layer%beta_init_std, &
213 gamma_initialiser = gamma_initialiser_, &
214 beta_initialiser = beta_initialiser_, &
215 moving_mean_initialiser = moving_mean_initialiser_, &
216 moving_variance_initialiser = moving_variance_initialiser_, &
217 verbose = verbose_ &
218 )
219
220
221 !---------------------------------------------------------------------------
222 ! Initialise layer shape
223 !---------------------------------------------------------------------------
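! Shape convention (see the branches below): a rank-2 input_shape is
! interpreted as [num_inputs, num_channels]; a rank-1 shape gives the
! number of channels and is promoted by prepending num_inputs = 1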
224 if(present(input_shape))then
225 if(present(num_channels).or.present(num_inputs))then
226 write(err_msg,'(A)') &
227 "both input_shape and num_channels/num_inputs present" // &
228 achar(13) // achar(10) // &
229 "These represent the same parameter, so are conflicting"
230 call stop_program(err_msg)
231 return
232 end if
233 if(size(input_shape).eq.1)then
234 call layer%init(input_shape= [ 1, input_shape ] )
235 else
236 call layer%init(input_shape= input_shape)
237 end if
238 elseif(present(num_channels).and.present(num_inputs))then
239 call layer%init(input_shape=[num_inputs, num_channels])
240 elseif(present(num_channels))then
241 call layer%init(input_shape=[1, num_channels])
242 elseif(present(num_inputs))then
243 call layer%init(input_shape=[num_inputs, 1])
244 end if
245
246 end function layer_setup
247 !###############################################################################
248
249
250 !###############################################################################
251 subroutine set_hyperparams_batchnorm1d( &
252 this, &
253 momentum, epsilon, &
254 gamma_init_mean, gamma_init_std, &
255 beta_init_mean, beta_init_std, &
256 gamma_initialiser, beta_initialiser, &
257 moving_mean_initialiser, moving_variance_initialiser, &
258 verbose )
259 !! Set hyperparameters for 1D batch normalisation layer
260 use athena__initialiser, only: initialiser_setup
261 implicit none
262
263 ! Arguments
264 class(batchnorm1d_layer_type), intent(inout) :: this
265 !! Instance of the 1D batch normalisation layer
266 real(real32), intent(in) :: momentum, epsilon
267 !! Momentum and epsilon
268 real(real32), intent(in) :: gamma_init_mean, gamma_init_std
269 !! Gamma initialisation mean and standard deviation
270 real(real32), intent(in) :: beta_init_mean, beta_init_std
271 !! Beta initialisation mean and standard deviation
272 class(base_init_type), allocatable, intent(in) :: &
273 gamma_initialiser, beta_initialiser
274 !! Gamma and beta initialisers
275 class(base_init_type), allocatable, intent(in) :: &
276 moving_mean_initialiser, moving_variance_initialiser
277 !! Moving mean and variance initialisers
278 integer, optional, intent(in) :: verbose
279 !! Verbosity level
280
281
282 this%name = "batchnorm1d"
283 this%type = "batc"
284 this%input_rank = 2
285 this%output_rank = 2
286 this%use_bias = .true.
287 this%momentum = momentum
288 this%epsilon = epsilon
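! Gamma is stored as the kernel and beta as the bias, so the shared
! kernel/bias machinery of batch_layer_type handles both parameter sets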
289 if(allocated(this%kernel_init)) deallocate(this%kernel_init)
290 if(.not.allocated(gamma_initialiser))then
291 this%kernel_init = initialiser_setup('ones')
292 else
293 allocate(this%kernel_init, source=gamma_initialiser)
294 end if
295 if(allocated(this%bias_init)) deallocate(this%bias_init)
296 if(.not.allocated(beta_initialiser))then
297 this%bias_init = initialiser_setup('zeros')
298 else
299 allocate(this%bias_init, source=beta_initialiser)
300 end if
301 if(.not.allocated(moving_mean_initialiser))then
302 this%moving_mean_init = initialiser_setup('zeros')
303 else
304 this%moving_mean_init = moving_mean_initialiser
305 end if
306 if(.not.allocated(moving_variance_initialiser))then
307 this%moving_variance_init = initialiser_setup('ones')
308 else
309 this%moving_variance_init = moving_variance_initialiser
310 end if
311 this%gamma_init_mean = gamma_init_mean
312 this%gamma_init_std = gamma_init_std
313 this%beta_init_mean = beta_init_mean
314 this%beta_init_std = beta_init_std
315 this%kernel_init%mean = this%gamma_init_mean
316 this%kernel_init%std = this%gamma_init_std
317 this%bias_init%mean = this%beta_init_mean
318 this%bias_init%std = this%beta_init_std
319 if(present(verbose))then
320 if(abs(verbose).gt.0)then
321 write(*,'("BATCHNORM1D gamma (kernel) initialiser: ",A)') &
322 trim(this%kernel_init%name)
323 write(*,'("BATCHNORM1D beta (bias) initialiser: ",A)') &
324 trim(this%bias_init%name)
325 write(*,'("BATCHNORM1D moving mean initialiser: ",A)') &
326 trim(this%moving_mean_init%name)
327 write(*,'("BATCHNORM1D moving variance initialiser: ",A)') &
328 trim(this%moving_variance_init%name)
329 end if
330 end if
331
332 end subroutine set_hyperparams_batchnorm1d
333 !###############################################################################
334
335
336 !##############################################################################!
337 ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * !
338 !##############################################################################!
339
340
341 !###############################################################################
342 subroutine read_batchnorm1d(this, unit, verbose)
343 !! Read 1D batch normalisation layer from file
344 use athena__tools_infile, only: assign_val, assign_vec, move
345 use coreutils, only: to_lower, to_upper, icount
346 use athena__initialiser, only: initialiser_setup
347 implicit none
348
349 ! Arguments
350 class(batchnorm1d_layer_type), intent(inout) :: this
351 !! Instance of the 1D batch normalisation layer
352 integer, intent(in) :: unit
353 !! File unit
354 integer, optional, intent(in) :: verbose
355 !! Verbosity level
356
357 ! Local variables
358 integer :: stat, verbose_
359 !! File status and verbosity level
360 integer :: i, j, k, c, itmp1, iline, final_line
361 !! Temporary integers and loop indices
362 integer :: num_channels
363 !! Number of channels
364 real(real32) :: momentum, epsilon
365 !! Momentum and epsilon
366 class(base_init_type), allocatable :: gamma_initialiser, beta_initialiser
367 !! Initialisers
368 class(base_init_type), allocatable :: &
369 moving_mean_initialiser, moving_variance_initialiser
370 !! Moving mean and variance initialisers
371 character(14) :: gamma_initialiser_name, beta_initialiser_name
372 !! Initialisers
373 character(14) :: &
374 moving_mean_initialiser_name, &
375 moving_variance_initialiser_name
376 !! Moving mean and variance initialisers
377 character(256) :: buffer, tag, err_msg
378 !! Buffer, tag, and error message
379 integer, dimension(2) :: input_shape
380 !! Input shape
381 real(real32), allocatable, dimension(:) :: data_list
382 !! Data list
383 integer, dimension(2) :: param_lines
384 !! Lines where parameters are found
385
386
387 ! Initialise defaults and optional arguments
388 !---------------------------------------------------------------------------
389 verbose_ = 0; if(present(verbose)) verbose_ = verbose
momentum = 0._real32; epsilon = 1.E-5_real32
gamma_initialiser_name = ''; beta_initialiser_name = ''
moving_mean_initialiser_name = ''
moving_variance_initialiser_name = ''
390
391
392 ! Loop over tags in layer card
393 !---------------------------------------------------------------------------
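! An illustrative layer card as parsed below (the opening BATCHNORM1D
! line is assumed to have been consumed by the caller):
!    INPUT_SHAPE = 1, 32
!    MOMENTUM = 0.99
!    EPSILON = 1.E-5
!    GAMMA
!      ... num_channels values ...
!    END GAMMA
!    BETA
!      ... num_channels values ...
!    END BETA
!    END BATCHNORM1D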
394 iline = 0
395 param_lines = 0
396 final_line = 0
397 tag_loop: do
398
399 ! Check for end of file
400 !------------------------------------------------------------------------
401 read(unit,'(A)',iostat=stat) buffer
402 if(stat.ne.0)then
403 write(err_msg,'("file encountered error (EoF?) before END ",A)') &
404 to_upper(this%name)
405 call stop_program(err_msg)
406 return
407 end if
408 if(trim(adjustl(buffer)).eq."") cycle tag_loop
409
410 ! Check for end of layer card
411 !------------------------------------------------------------------------
412 if(trim(adjustl(buffer)).eq."END "//to_upper(trim(this%name)))then
413 final_line = iline
414 backspace(unit)
415 exit tag_loop
416 end if
417 iline = iline + 1
418
419 tag = trim(adjustl(buffer))
420 if(scan(buffer,"=").ne.0) tag=trim(tag(:scan(tag,"=")-1))
421
422 ! Read parameters from save file
423 !------------------------------------------------------------------------
424 select case(trim(tag))
425 case("INPUT_SHAPE")
426 call assign_vec(buffer, input_shape, itmp1)
427 case("MOMENTUM")
428 call assign_val(buffer, momentum, itmp1)
429 case("EPSILON")
430 call assign_val(buffer, epsilon, itmp1)
431 case("NUM_CHANNELS")
432 call assign_val(buffer, num_channels, itmp1)
433 write(0,*) "NUM_CHANNELS and INPUT_SHAPE are conflicting parameters"
434 write(0,*) "NUM_CHANNELS will be ignored"
435 case("GAMMA_INITIALISER", "KERNEL_INITIALISER")
436 if(param_lines(1).ne.0)then
437 write(err_msg,'("GAMMA and GAMMA_INITIALISER defined. Using GAMMA only.")')
438 call print_warning(err_msg)
439 end if
440 call assign_val(buffer, gamma_initialiser_name, itmp1)
441 case("BETA_INITIALISER", "BIAS_INITIALISER")
442 if(param_lines(2).ne.0)then
443 write(err_msg,'("BETA and BETA_INITIALISER defined. Using BETA only.")')
444 call print_warning(err_msg)
445 end if
446 call assign_val(buffer, beta_initialiser_name, itmp1)
447 case("MOVING_MEAN_INITIALISER")
448 call assign_val(buffer, moving_mean_initialiser_name, itmp1)
449 case("MOVING_VARIANCE_INITIALISER")
450 call assign_val(buffer, moving_variance_initialiser_name, itmp1)
451 case("GAMMA")
452 gamma_initialiser_name = 'zeros'
453 param_lines(1) = iline
454 case("BETA")
455 beta_initialiser_name = 'zeros'
456 param_lines(2) = iline
457 case default
458 ! Don't look for "e" due to scientific notation of numbers
459 ! ... i.e. exponent (E+00)
460 if(scan(to_lower(trim(adjustl(buffer))),&
461 'abcdfghijklmnopqrstuvwxyz').eq.0)then
462 cycle tag_loop
463 elseif(tag(:3).eq.'END')then
464 cycle tag_loop
465 end if
466 write(err_msg,'("Unrecognised line in input file: ",A)') &
467 trim(adjustl(buffer))
468 call stop_program(err_msg)
469 return
470 end select
471 end do tag_loop
472 gamma_initialiser = initialiser_setup(gamma_initialiser_name)
473 beta_initialiser = initialiser_setup(beta_initialiser_name)
474 moving_mean_initialiser = initialiser_setup(moving_mean_initialiser_name)
475 moving_variance_initialiser = initialiser_setup(moving_variance_initialiser_name)
476
477
478 ! Set hyperparameters and initialise layer
479 !---------------------------------------------------------------------------
480 num_channels = input_shape(size(input_shape))
481 call this%set_hyperparams( &
482 momentum = momentum, &
483 epsilon = epsilon, &
484 gamma_init_mean = this%gamma_init_mean, &
485 gamma_init_std = this%gamma_init_std, &
486 beta_init_mean = this%beta_init_mean, &
487 beta_init_std = this%beta_init_std, &
488 gamma_initialiser = gamma_initialiser, &
489 beta_initialiser = beta_initialiser, &
490 moving_mean_initialiser = moving_mean_initialiser, &
491 moving_variance_initialiser = moving_variance_initialiser, &
492 verbose = verbose_ &
493 )
494 call this%init(input_shape = input_shape)
495
496
497 ! Read GAMMA and BETA parameter cards if present
498 !---------------------------------------------------------------------------
499 allocate(data_list(num_channels), source=0._real32)
500 do i = 2, 1, -1
501 if(param_lines(i).eq.0) cycle
502 call move(unit, param_lines(i) - iline, iostat=stat)
503 iline = param_lines(i) + 1
504 c = 1
505 k = 1
506 data_list = 0._real32
507 data_concat_loop: do while(c.le.num_channels)
508 iline = iline + 1
509 read(unit,'(A)',iostat=stat) buffer
510 if(stat.ne.0) exit data_concat_loop
511 k = icount(buffer)
512 read(buffer,*,iostat=stat) (data_list(j),j=c,c+k-1)
513 c = c + k
514 end do data_concat_loop
515 read(unit,'(A)',iostat=stat) buffer
516 select case(i)
517 case(1) ! gamma
518 this%params(1)%val(1:this%num_channels,1) = data_list
519 if(trim(adjustl(buffer)).ne."END GAMMA")then
520 write(err_msg,'("END GAMMA not where expected: ",A)') &
521 trim(adjustl(buffer))
522 call stop_program(err_msg)
523 return
524 end if
525 case(2) ! beta
526 this%params(1)%val(this%num_channels+1:this%num_channels*2,1) = &
527 data_list
528 if(trim(adjustl(buffer)).ne."END BETA")then
529 write(err_msg,'("END BETA not where expected: ",A)') &
530 trim(adjustl(buffer))
531 call stop_program(err_msg)
532 return
533 end if
534 end select
535 end do
536 deallocate(data_list)
537
538
539 ! Check for end of layer card
540 !---------------------------------------------------------------------------
541 call move(unit, final_line - iline, iostat=stat)
542 read(unit,'(A)') buffer
543 if(trim(adjustl(buffer)).ne."END "//to_upper(trim(this%name)))then
544 write(0,*) trim(adjustl(buffer))
545 write(err_msg,'("END ",A," not where expected")') to_upper(this%name)
546 call stop_program(err_msg)
547 return
548 end if
549
550 end subroutine read_batchnorm1d
551 !###############################################################################
552
553
554 !###############################################################################
555 function read_batchnorm1d_layer(unit, verbose) result(layer)
556 !! Read 1D batch normalisation layer from file and return layer
557 implicit none
558
559 ! Arguments
560 integer, intent(in) :: unit
561 !! File unit
562 integer, optional, intent(in) :: verbose
563 !! Verbosity level
564 class(base_layer_type), allocatable :: layer
565 !! Allocatable instance of the base layer
566
567 ! Local variables
568 integer :: verbose_
569 !! Verbosity level
570
571 verbose_ = 0; if(present(verbose)) verbose_ = verbose
572 allocate(layer, source=batchnorm1d_layer_type())
573 call layer%read(unit, verbose=verbose_)
574
575 end function read_batchnorm1d_layer
576 !###############################################################################
577
578
579 !###############################################################################
580 subroutine build_from_onnx_batchnorm1d( &
581 this, node, initialisers, value_info, verbose &
582 )
583 !! Read ONNX attributes for 1D batch normalisation layer
584 use athena__initialiser_data, only: data_init_type
585 implicit none
586
587 ! Arguments
588 class(batchnorm1d_layer_type), intent(inout) :: this
589 !! Instance of the 1D batch normalisation layer
590 type(onnx_node_type), intent(in) :: node
591 !! ONNX node information
592 type(onnx_initialiser_type), dimension(:), intent(in) :: initialisers
593 !! ONNX initialiser information
594 type(onnx_tensor_type), dimension(:), intent(in) :: value_info
595 !! ONNX value info
596 integer, intent(in) :: verbose
597 !! Verbosity level
598
599 ! Local variables
600 integer :: i
601 !! Loop index
602 real(real32) :: epsilon, momentum
603 !! Epsilon and momentum values
604 character(256) :: val
605 !! Attribute value
606 class(base_init_type), allocatable :: gamma_initialiser, beta_initialiser
607 class(base_init_type), allocatable :: &
608 moving_mean_initialiser, moving_variance_initialiser
609
610 ! Set default values
611 epsilon = 1.E-5_real32
612 momentum = 0.9_real32
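! These match the defaults of the ONNX BatchNormalization operator
! (epsilon = 1e-05, momentum = 0.9)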
613
614 ! Parse ONNX attributes
615 do i = 1, size(node%attributes)
616 val = node%attributes(i)%val
617 select case(trim(adjustl(node%attributes(i)%name)))
618 case("epsilon")
619 read(val,*) epsilon
620 case("momentum")
621 read(val,*) momentum
622 case default
623 ! Unrecognised attribute: warn and continue
624 write(0,*) "WARNING: Unrecognised attribute in ONNX BATCHNORM1D &
625 &layer: ", trim(adjustl(node%attributes(i)%name))
626 end select
627 end do
628
629 ! Check for 4 initialisers: gamma, beta, mean, variance
630 if(size(initialisers).ne.4)then
631 call stop_program("ONNX BATCHNORM1D layer requires 4 initialisers &
632 &(gamma, beta, mean, variance)")
633 return
634 end if
635
636 ! ONNX BatchNormalization order: gamma, beta, mean, variance
637 gamma_initialiser = data_init_type( data = initialisers(1)%data )
638 beta_initialiser = data_init_type( data = initialisers(2)%data )
639 moving_mean_initialiser = data_init_type( data = initialisers(3)%data )
640 moving_variance_initialiser = data_init_type( data = initialisers(4)%data )
641
642 call this%set_hyperparams( &
643 momentum = momentum, &
644 epsilon = epsilon, &
645 gamma_init_mean = 1.0_real32, &
646 gamma_init_std = 0.0_real32, &
647 beta_init_mean = 0.0_real32, &
648 beta_init_std = 0.0_real32, &
649 gamma_initialiser = gamma_initialiser, &
650 beta_initialiser = beta_initialiser, &
651 moving_mean_initialiser = moving_mean_initialiser, &
652 moving_variance_initialiser = moving_variance_initialiser, &
653 verbose = verbose &
654 )
655
656 end subroutine build_from_onnx_batchnorm1d
657 !###############################################################################
658
659
660 !##############################################################################!
661 ! * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * !
662 !##############################################################################!
663
664
665 !###############################################################################
666 subroutine forward_batchnorm1d(this, input)
667 !! Forward propagation
668 implicit none
669
670 ! Arguments
671 class(batchnorm1d_layer_type), intent(inout) :: this
672 !! Instance of the 1D batch normalisation layer
673 class(array_type), dimension(:,:), intent(in) :: input
674 !! Input values
675
676 ! Local variables
677 class(batchnorm_array_type), pointer :: ptr
678 !! Pointer to the batch-normalised result array
679
680
681 select case(this%inference)
682 case(.true.)
683 ! Inference: normalise with the stored running statistics
684
685 ptr => batchnorm_inference(input(1,1), this%params(1), &
686 this%mean(:), this%variance(:), this%epsilon &
687 )
688
689 case default
690 ! Training: compute batch statistics and update the running averages
691 ptr => batchnorm( &
692 input(1,1), this%params(1),&
693 this%momentum, this%mean(:), this%variance(:), this%epsilon &
694 )
695
696 end select
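! Transfer the result into the stored output without a deep copy, then
! mark it as persistent so it is not freed as a temporary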
697 select type(output => this%output(1,1))
698 type is(batchnorm_array_type)
699 call output%assign_shallow(ptr)
700 output%epsilon = ptr%epsilon
701 output%mean = ptr%mean
702 output%variance = ptr%variance
703 end select
704 deallocate(ptr)
705 this%output(1,1)%is_temporary = .false.
706
707 end subroutine forward_batchnorm1d
708 !###############################################################################
709
710 end module athena__batchnorm1d_layer
711