GCC Code Coverage Report


Directory: src/athena/
File: athena_activation_selu.f90
Date: 2025-12-10 07:37:07
            Exec   Total   Coverage
Lines:         0       0     100.0%
Functions:     0       0          -
Branches:      0       0          -

Line Branch Exec Source
1 module athena__activation_selu
2 !! Module containing implementation of the SELU activation function
3 !!
4 !! This module implements the Scaled Exponential Linear Unit (SELU), which has
5 !! self-normalizing properties for deep networks.
6 !!
7 !! Mathematical operation:
8 !! \[ f(x) = \lambda \begin{cases} x & \text{if } x > 0 \\\\ \alpha(e^x - 1) & \text{if } x \leq 0 \end{cases} \]
9 !!
10 !! where \(\lambda \approx 1.0507\) and \(\alpha \approx 1.6733\) are chosen
11 !! so that activations converge towards zero mean and unit variance
12 !!
13 !! Derivative:
14 !! \[ f'(x) = \lambda \begin{cases} 1 & \text{if } x > 0 \\\\ \alpha e^x & \text{if } x \leq 0 \end{cases} \]
15 !!
16 !! Properties: self-normalizing; enables training of very deep networks
17 !! Requires: LeCun normal initialisation and alpha dropout
18 !! Reference: Klambauer et al., "Self-Normalizing Neural Networks", NeurIPS 2017
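!!
!! Worked example using the default constants (pure arithmetic):
!! \( f(2.0) = 1.0507 \times 2.0 = 2.1014 \)
!! \( f(-1.0) = 1.0507 \times 1.6733 \times (e^{-1} - 1) \approx -1.1113 \)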
19 use coreutils, only: real32, print_warning
20 use diffstruc, only: array_type, operator(*), operator(-), operator(.gt.), &
21 merge, exp
22 use athena__misc_types, only: base_actv_type
23 use athena__activation_relu, only: relu_actv_type
24 use athena__misc_types, only: onnx_attribute_type
25 implicit none
26
27
28 private
29
30 public :: selu_actv_type, create_from_onnx_selu_activation
31
32
33 type, extends(relu_actv_type) :: selu_actv_type
34 !! Type for SELU activation function with overloaded procedures
35 real(real32) :: alpha = 1.6732632423543772848170429916717_real32
36 !! Alpha parameter for SELU
37 real(real32) :: lambda = 1.0507009873554804934193349852946_real32
38 !! Lambda parameter for SELU
39 contains
40 procedure, pass(this) :: apply => apply_selu
41 procedure, pass(this) :: reset => reset_selu
42 procedure, pass(this) :: apply_attributes => apply_attributes_selu
43 procedure, pass(this) :: export_attributes => export_attributes_selu
44 end type selu_actv_type
45
46 interface selu_actv_type
47 procedure initialise
48 end interface selu_actv_type
49
50
51
52 contains
53
54 !###############################################################################
55 function initialise(scale, alpha, lambda, attributes) result(activation)
56 !! Initialise a SELU activation function
57 implicit none
58
59 ! Arguments
60 real(real32), optional, intent(in) :: scale
61 !! Optional scale factor for activation output
62 real(real32), optional, intent(in) :: alpha
63 !! Optional alpha parameter (default: 1.67326)
64 real(real32), optional, intent(in) :: lambda
65 !! Optional lambda parameter (default: 1.0507)
66 type(selu_actv_type) :: activation
67 !! SELU activation type
68 type(onnx_attribute_type), optional, intent(in) :: attributes(:)
69 !! Optional ONNX attributes
70
71
72 call activation%reset()
73
74 if(present(scale)) activation%scale = scale
75 if(abs(activation%scale-1._real32) .gt. 1.e-6_real32)then
76 activation%apply_scaling = .true.
77 end if
78 if(present(alpha)) activation%alpha = alpha
79 if(present(lambda)) activation%lambda = lambda
80 if(present(attributes))then
81 call activation%apply_attributes(attributes)
82 end if
83
84 end function initialise
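! Illustrative constructions (values are arbitrary examples, not recommendations):
!   act = selu_actv_type()                                  ! library defaults
!   act = selu_actv_type(scale = 2._real32)                 ! also sets apply_scaling
!   act = selu_actv_type(alpha = 1.5_real32, lambda = 1._real32)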
85 !-------------------------------------------------------------------------------
86 pure subroutine reset_selu(this)
87 !! Reset SELU activation function attributes and variables
88 implicit none
89
90 ! Arguments
91 class(selu_actv_type), intent(inout) :: this
92 !! SELU activation type
93
94 this%name = "selu"
95 this%scale = 1._real32
96 this%threshold = 0._real32
97 this%apply_scaling = .false.
98 this%alpha = 1.6732632423543772848170429916717_real32 ! full-precision type default
99 this%lambda = 1.0507009873554804934193349852946_real32 ! full-precision type default
100
101 end subroutine reset_selu
102 !-------------------------------------------------------------------------------
103 function create_from_onnx_selu_activation(attributes) result(activation)
104 !! Create SELU activation function from ONNX attributes
105 implicit none
106
107 ! Arguments
108 type(onnx_attribute_type), dimension(:), intent(in) :: attributes
109 !! Array of ONNX attributes
110
111 class(base_actv_type), allocatable :: activation
112 !! Instance of activation type
113
114 allocate(activation, source = selu_actv_type(attributes = attributes))
115
116 end function create_from_onnx_selu_activation
117 !###############################################################################
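! Illustrative use of the factory (attribute value is an arbitrary example):
!   class(base_actv_type), allocatable :: act
!   act = create_from_onnx_selu_activation( &
!        [ onnx_attribute_type("alpha", "float", "1.5") ] )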
118
119
120 !###############################################################################
121 subroutine apply_attributes_selu(this, attributes)
122 !! Load ONNX attributes into SELU activation function
123 implicit none
124
125 ! Arguments
126 class(selu_actv_type), intent(inout) :: this
127 !! SELU activation type
128 type(onnx_attribute_type), dimension(:), intent(in) :: attributes
129 !! Array of ONNX attributes
130
131 ! Local variables
132 integer :: i
133 !! Loop variable
134
135 ! Load provided attributes
136 do i=1, size(attributes,dim=1)
137 select case(trim(attributes(i)%name))
138 case("scale")
139 read(attributes(i)%val,*) this%scale
140 if(abs(this%scale-1._real32) .gt. 1.e-6_real32)then
141 this%apply_scaling = .true.
142 else
143 this%apply_scaling = .false.
144 end if
145 case("alpha")
146 read(attributes(i)%val,*) this%alpha
147 case("lambda")
148 read(attributes(i)%val,*) this%lambda
149 case("name")
150 if(trim(attributes(i)%val) .ne. trim(this%name)) then
151 call print_warning( &
152 'SELU activation: name attribute "' // &
153 trim(attributes(i)%val) // &
154 '"" does not match expected "' // trim(this%name)//'"' &
155 )
156
157 end if
158 case default
159 call print_warning( &
160 'SELU activation: unknown attribute "' // &
161 trim(attributes(i)%name) // '"' &
162 )
163 end select
164 end do
165
166 end subroutine apply_attributes_selu
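! Illustrative call (string values are parsed by the list-directed reads above):
!   call act%apply_attributes( [ onnx_attribute_type("lambda", "float", "1.0507") ] )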
167 !###############################################################################
168
169
170 !###############################################################################
171 pure function export_attributes_selu(this) result(attributes)
172 !! Export SELU activation function attributes as ONNX attributes
173 implicit none
174
175 ! Arguments
176 class(selu_actv_type), intent(in) :: this
177 !! SELU activation type
178 type(onnx_attribute_type), allocatable, dimension(:) :: attributes
179 !! Array of ONNX attributes
180
181 ! Local variables
182 character(50) :: buffer
183 !! Temporary string buffer
184
185 allocate(attributes(4))
186
187 write(buffer, '(A)') this%name
188 attributes(1) = onnx_attribute_type( &
189 "name", "string", trim(adjustl(buffer)) )
190
191 write(buffer, '(F10.6)') this%scale
192 attributes(2) = onnx_attribute_type( &
193 "scale", "float", trim(adjustl(buffer)) )
194
195 write(buffer, '(F10.6)') this%alpha
196 attributes(3) = onnx_attribute_type( &
197 "alpha", "float", trim(adjustl(buffer)) )
198
199 write(buffer, '(F10.6)') this%lambda
200 attributes(4) = onnx_attribute_type( &
201 "lambda", "float", trim(adjustl(buffer)) )
202
203 end function export_attributes_selu
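! With default settings the exported values would read, under F10.6 formatting:
!   name = selu, scale = 1.000000, alpha = 1.673263, lambda = 1.050701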
204 !###############################################################################
205
206
207 !###############################################################################
208 function apply_selu(this, val) result(output)
209 !! Apply SELU activation to array
210 !!
211 !! Computes: f(x) = λ * x if x > 0
212 !! f(x) = λ * α * (exp(x) - 1) if x ≤ 0
213 implicit none
214
215 ! Arguments
216 class(selu_actv_type), intent(in) :: this
217 !! SELU activation type
218 type(array_type), intent(in) :: val
219 !! Input values
220 type(array_type), pointer :: output
221 !! Activated output values
222
223 ! Local variables
224 type(array_type), pointer :: positive_part, negative_part
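!! Autodiff arrays holding the two branches of the piecewise SELU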
225
226 ! Compute SELU: λ * merge(x, α * (exp(x) - 1), x > 0)
227 positive_part => val * this%lambda
228 negative_part => (exp(val) - 1._real32) * this%alpha * this%lambda
229 output => merge(positive_part, negative_part, val .gt. 0._real32)
230
231 if(this%apply_scaling)then
232 output => output * this%scale
233 end if
234
235 end function apply_selu
236 !###############################################################################
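! End-to-end sketch (illustrative; how a populated array_type is built is
! diffstruc-specific and not shown in this file):
!   type(selu_actv_type) :: act
!   type(array_type) :: x
!   type(array_type), pointer :: y
!   act = selu_actv_type()
!   ! ... fill x with input values via the diffstruc API ...
!   y => act%apply(x)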
237
238 end module athena__activation_selu
239