GCC Code Coverage Report


Directory: src/athena/
File: athena_activation_softmax.f90
Date: 2025-12-10 07:37:07
                Exec   Total   Coverage
Lines:             0       0     100.0%
Functions:         0       0         -%
Branches:          0       0         -%

Line Branch Exec Source
1 module athena__activation_softmax
2 !! Module containing implementation of the softmax activation function
3 !!
4 !! This module implements softmax for converting logits into probability
5 !! distributions. Commonly used for multi-class classification.
6 !!
7 !! Mathematical operation:
8 !! \[ \text{softmax}(\mathbf{x})_i = \frac{e^{x_i}}{\sum_{j=1}^{n} e^{x_j}} \]
9 !!
10 !! Properties:
11 !! - Outputs sum to 1: \(\sum_{i=1}^{n} \text{softmax}(\mathbf{x})_i = 1\)
12 !! - All outputs in range \((0, 1)\)
13 !! - Preserves ordering: \(x_i > x_j \Rightarrow \text{softmax}(\mathbf{x})_i > \text{softmax}(\mathbf{x})_j\)
14 !! - Translation invariant: \(\text{softmax}(\mathbf{x}+c) = \text{softmax}(\mathbf{x})\) for any scalar \(c\)
15 !!
16 !! Derivative (Jacobian):
17 !! \[ \frac{\partial f_i}{\partial x_j} = f_i(\delta_{ij} - f_j) \]
18 !! where \(\delta_{ij}\) is the Kronecker delta
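A quick worked instance of the formula documented above: for \(\mathbf{x} = (1, 2, 3)\),
\[ \text{softmax}(\mathbf{x}) = \frac{(e^{1}, e^{2}, e^{3})}{e^{1} + e^{2} + e^{3}} \approx (0.0900, 0.2447, 0.6652), \]
which sums to 1 and, by translation invariance, equals \(\text{softmax}((0, 1, 2))\), the max-shifted form evaluated in apply_softmax below.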
19 use coreutils, only: real32, print_warning
20 use diffstruc, only: array_type, operator(*)
21 use athena__diffstruc_extd, only: softmax
22 use athena__misc_types, only: base_actv_type
23 use athena__misc_types, only: onnx_attribute_type
24 implicit none
25
26
27 private
28
29 public :: softmax_actv_type, create_from_onnx_softmax_activation
30
31
32 type, extends(base_actv_type) :: softmax_actv_type
33 !! Type for the softmax activation function with overridden type-bound procedures
34 contains
35 procedure, pass(this) :: apply => apply_softmax
36 procedure, pass(this) :: reset => reset_softmax
37 procedure, pass(this) :: apply_attributes => apply_attributes_softmax
38 procedure, pass(this) :: export_attributes => export_attributes_softmax
39 end type softmax_actv_type
40
41 interface softmax_actv_type
42 procedure initialise
43 end interface softmax_actv_type
44
45
46
47 contains
48
49 !###############################################################################
50 function initialise(scale, attributes) result(activation)
51 !! Initialise a softmax activation function
52 implicit none
53
54 ! Arguments
55 real(real32), intent(in), optional :: scale
56 !! Optional scale factor for activation output
57 type(onnx_attribute_type), dimension(:), intent(in), optional :: attributes
58 !! Optional array of ONNX attributes
59 type(softmax_actv_type) :: activation
60 !! Softmax activation type
61
62 call activation%reset()
63
64 if(present(scale)) activation%scale = scale
65 if(abs(activation%scale-1._real32) .gt. 1.e-6_real32)then
66 activation%apply_scaling = .true.
67 end if
68
69 if(present(attributes)) then
70 call activation%apply_attributes(attributes)
71 end if
72
73 end function initialise
74 !-------------------------------------------------------------------------------
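A minimal usage sketch for this constructor (the program wrapper and the scale value 0.5 are illustrative choices, not taken from the library's own examples):

    program demo_softmax_scale
       use, intrinsic :: iso_fortran_env, only: real32
       use athena__activation_softmax, only: softmax_actv_type
       implicit none
       type(softmax_actv_type) :: actv

       ! Non-unit scale, so initialise() also sets actv%apply_scaling = .true.
       actv = softmax_actv_type(scale = 0.5_real32)
    end program demo_softmax_scale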
75 pure subroutine reset_softmax(this)
76 !! Reset softmax activation function attributes and variables
77 implicit none
78
79 ! Arguments
80 class(softmax_actv_type), intent(inout) :: this
81 !! Softmax activation type
82
83 this%name = "softmax"
84 this%scale = 1._real32
85 this%threshold = 0._real32
86 this%apply_scaling = .false.
87
88 end subroutine reset_softmax
89 !-------------------------------------------------------------------------------
90 function create_from_onnx_softmax_activation(attributes) result(activation)
91 !! Create softmax activation function from ONNX attributes
92 implicit none
93
94 ! Arguments
95 type(onnx_attribute_type), dimension(:), intent(in) :: attributes
96 !! Array of ONNX attributes
97
98 class(base_actv_type), allocatable :: activation
99 !! Instance of activation type
100
101 allocate(activation, source = softmax_actv_type(attributes = attributes))
102
103 end function create_from_onnx_softmax_activation
104 !###############################################################################
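A brief sketch of how this factory might be called during ONNX import; the attribute array attrs is assumed to be supplied by whatever reads the ONNX graph:

    ! Fragment only: declarations and the call, with attrs filled elsewhere
    use athena__activation_softmax, only: create_from_onnx_softmax_activation
    use athena__misc_types, only: base_actv_type, onnx_attribute_type

    class(base_actv_type), allocatable :: actv
    type(onnx_attribute_type), allocatable :: attrs(:)

    ! ... attrs populated from the ONNX node ...
    actv = create_from_onnx_softmax_activation(attrs)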
105
106
107 !###############################################################################
108 subroutine apply_attributes_softmax(this, attributes)
109 !! Load ONNX attributes into softmax activation function
110 implicit none
111
112 ! Arguments
113 class(softmax_actv_type), intent(inout) :: this
114 !! Softmax activation type
115 type(onnx_attribute_type), dimension(:), intent(in) :: attributes
116 !! Array of ONNX attributes
117
118 ! Local variables
119 integer :: i
120 !! Loop variable
121
122 ! Load provided attributes
123 do i=1, size(attributes,dim=1)
124 select case(trim(attributes(i)%name))
125 case("scale")
126 read(attributes(i)%val,*) this%scale
127 if(abs(this%scale-1._real32) .gt. 1.e-6_real32)then
128 this%apply_scaling = .true.
129 else
130 this%apply_scaling = .false.
131 end if
132 case("name")
133 if(trim(attributes(i)%val) .ne. trim(this%name)) then
134 call print_warning( &
135 'Softmax activation: name attribute "' // &
136 trim(attributes(i)%val) // &
137 '"" does not match expected "' // trim(this%name)//'"' &
138 )
139
140 end if
141 case default
142 call print_warning( &
143 'Softmax activation: unknown attribute '// &
144 trim(attributes(i)%name) &
145 )
146 end select
147 end do
148
149 end subroutine apply_attributes_softmax
150 !###############################################################################
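A hedged sketch of loading a single attribute by hand; the three-argument onnx_attribute_type constructor (name, type string, value string) mirrors the usage in export_attributes_softmax below, and the value 0.5 is illustrative:

    program demo_apply_attributes
       use athena__activation_softmax, only: softmax_actv_type
       use athena__misc_types, only: onnx_attribute_type
       implicit none
       type(softmax_actv_type) :: actv
       type(onnx_attribute_type) :: attrs(1)

       actv = softmax_actv_type()
       ! Value strings are parsed with a list-directed read, so "0.5" suffices
       attrs(1) = onnx_attribute_type("scale", "float", "0.5")
       call actv%apply_attributes(attrs)
       ! actv%scale is now 0.5 and actv%apply_scaling is .true.
    end program demo_apply_attributes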
151
152
153 !###############################################################################
154 pure function export_attributes_softmax(this) result(attributes)
155 !! Export softmax activation function attributes as ONNX attributes
156 implicit none
157
158 ! Arguments
159 class(softmax_actv_type), intent(in) :: this
160 !! Softmax activation type
161 type(onnx_attribute_type), allocatable, dimension(:) :: attributes
162 !! Array of ONNX attributes
163
164 ! Local variables
165 character(50) :: buffer
166 !! Temporary string buffer
167
168 allocate(attributes(2))
169
170 write(buffer, '(A)') this%name
171 attributes(1) = onnx_attribute_type( &
172 "name", "string", trim(adjustl(buffer)) )
173
174 write(buffer, '(F10.6)') this%scale
175 attributes(2) = onnx_attribute_type( &
176 "scale", "float", trim(adjustl(buffer)) )
177
178 end function export_attributes_softmax
179 !###############################################################################
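Continuing the sketch above, the same activation can be serialised back to attributes; with scale = 0.5 the F10.6 format used above yields the value string "0.500000":

       type(onnx_attribute_type), allocatable :: exported(:)

       exported = actv%export_attributes()
       ! exported(1): ("name",  "string", "softmax")
       ! exported(2): ("scale", "float",  "0.500000")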
180
181
182 !###############################################################################
183 function apply_softmax(this, val) result(output)
184 !! Apply softmax activation along dimension 2 of the input array
185 !!
186 !! Computes: f = exp(x-max)/sum(exp(x-max))
187 implicit none
188
189 ! Arguments
190 class(softmax_actv_type), intent(in) :: this
191 !! Softmax activation type
192 type(array_type), intent(in) :: val
193 !! Input values
194 type(array_type), pointer :: output
195 !! Normalised probability distribution output
196
197 ! Compute softmax values
198 if(this%apply_scaling)then
199 output => softmax(val, dim=2) * this%scale
200 else
201 output => softmax(val, dim=2)
202 end if
203 end function apply_softmax
204 !###############################################################################
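The max-shift mentioned in the docstring of apply_softmax leaves the result unchanged; writing \(m = \max_k x_k\),
\[ \frac{e^{x_i - m}}{\sum_{j=1}^{n} e^{x_j - m}} = \frac{e^{-m}\, e^{x_i}}{e^{-m} \sum_{j=1}^{n} e^{x_j}} = \frac{e^{x_i}}{\sum_{j=1}^{n} e^{x_j}}, \]
so the shift only keeps every exponent non-positive and avoids overflow for large logits.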
205
206 end module athena__activation_softmax
207