```fortran
module athena__activation_leaky_relu
  !! Module containing an implementation of the leaky ReLU activation function
  !!
  !! This module implements leaky ReLU, a variant of ReLU that allows small
  !! negative values, preventing the "dying ReLU" problem.
  !!
  !! Mathematical operation:
  !! \[ f(x) = \begin{cases} x & \text{if } x > 0 \\ \alpha x & \text{if } x \leq 0 \end{cases} \]
  !!
  !! where \(\alpha\) is a small positive constant (typically 0.01 or 0.2)
  !!
  !! Derivative:
  !! \[ f'(x) = \begin{cases} 1 & \text{if } x > 0 \\ \alpha & \text{if } x \leq 0 \end{cases} \]
  !!
  !! Properties: non-saturating, allows gradient flow for negative inputs
  !! Reference: Maas et al. (2013), ICML
  use coreutils, only: real32, print_warning
  use diffstruc, only: array_type, operator(*), max
  use athena__misc_types, only: base_actv_type, onnx_attribute_type
  use athena__activation_relu, only: relu_actv_type
  implicit none


  private

  public :: leaky_relu_actv_type, create_from_onnx_leaky_relu_activation


  type, extends(relu_actv_type) :: leaky_relu_actv_type
     real(real32) :: alpha
   contains
     procedure, pass(this) :: apply => leaky_relu_activate
     procedure, pass(this) :: reset => leaky_relu_reset
     procedure, pass(this) :: apply_attributes => apply_attributes_leaky_relu
     procedure, pass(this) :: export_attributes => export_attributes_leaky_relu
  end type leaky_relu_actv_type

  interface leaky_relu_actv_type
     procedure initialise
  end interface leaky_relu_actv_type


contains
```
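To make the piecewise definition above concrete, here is a minimal standalone sketch of the formula with the default \(\alpha = 0.01\). It uses only intrinsic Fortran, not the library's `array_type` or autodiff machinery:

```fortran
program leaky_relu_demo
  !! Standalone demonstration of the leaky ReLU formula; plain Fortran only.
  use, intrinsic :: iso_fortran_env, only: real32
  implicit none

  real(real32), parameter :: alpha = 0.01_real32
  real(real32) :: x(4), y(4)

  x = [ -2.0_real32, -0.5_real32, 0.0_real32, 3.0_real32 ]

  ! f(x) = x where x > 0, alpha*x otherwise; merge applies this elementwise
  y = merge(x, alpha * x, x > 0.0_real32)

  print '(4F10.4)', y   ! expected: -0.0200  -0.0050  0.0000  3.0000
end program leaky_relu_demo
```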
```fortran
!###############################################################################
  function initialise(scale, attributes) result(activation)
    !! Initialise a leaky ReLU activation function
    implicit none

    ! Arguments
    real(real32), intent(in), optional :: scale
    !! Optional scale factor for activation output
    type(onnx_attribute_type), dimension(:), intent(in), optional :: attributes
    !! Optional array of ONNX attributes
    type(leaky_relu_actv_type) :: activation
    !! Leaky ReLU activation type


    call activation%reset()

    if(present(scale)) activation%scale = scale
    if(abs(activation%scale - 1._real32) .gt. 1.e-6_real32)then
       activation%apply_scaling = .true.
    end if

    if(present(attributes)) then
       call activation%apply_attributes(attributes)
    end if

  end function initialise
!-------------------------------------------------------------------------------
  pure subroutine leaky_relu_reset(this)
    !! Reset leaky ReLU activation function attributes and variables
    implicit none

    ! Arguments
    class(leaky_relu_actv_type), intent(inout) :: this
    !! Leaky ReLU activation type

    this%name = "leaky_relu"
    this%scale = 1._real32
    this%threshold = 0._real32
    this%alpha = 0.01_real32
    this%apply_scaling = .false.

  end subroutine leaky_relu_reset
!-------------------------------------------------------------------------------
  function create_from_onnx_leaky_relu_activation(attributes) result(activation)
    !! Create leaky ReLU activation function from ONNX attributes
    implicit none

    ! Arguments
    type(onnx_attribute_type), dimension(:), intent(in) :: attributes
    !! Array of ONNX attributes

    class(base_actv_type), allocatable :: activation
    !! Instance of activation type

    allocate(activation, source = leaky_relu_actv_type(attributes = attributes))

  end function create_from_onnx_leaky_relu_activation
!###############################################################################
```
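A hypothetical usage sketch for the constructor interface above. The program is illustrative, not part of the module; it assumes the module is importable as shown and that `real32` from `iso_fortran_env` matches the library's kind:

```fortran
program construct_leaky_relu
  use, intrinsic :: iso_fortran_env, only: real32
  use athena__activation_leaky_relu, only: leaky_relu_actv_type
  implicit none

  type(leaky_relu_actv_type) :: actv

  ! Defaults from leaky_relu_reset: alpha = 0.01, scale = 1, no scaling
  actv = leaky_relu_actv_type()

  ! A non-unit scale sets apply_scaling = .true. inside initialise
  actv = leaky_relu_actv_type(scale = 2.0_real32)

  ! alpha is not an argument of initialise; set the component directly,
  ! or pass it as an ONNX attribute (see the next sketch)
  actv%alpha = 0.2_real32
end program construct_leaky_relu
```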
```fortran
!###############################################################################
  subroutine apply_attributes_leaky_relu(this, attributes)
    !! Load ONNX attributes into leaky ReLU activation function
    implicit none

    ! Arguments
    class(leaky_relu_actv_type), intent(inout) :: this
    !! Leaky ReLU activation type
    type(onnx_attribute_type), dimension(:), intent(in) :: attributes
    !! Array of ONNX attributes

    ! Local variables
    integer :: i
    !! Loop variable

    ! Load provided attributes
    do i = 1, size(attributes, dim=1)
       select case(trim(attributes(i)%name))
       case("scale")
          read(attributes(i)%val, *) this%scale
          if(abs(this%scale - 1._real32) .gt. 1.e-6_real32)then
             this%apply_scaling = .true.
          else
             this%apply_scaling = .false.
          end if
       case("alpha")
          read(attributes(i)%val, *) this%alpha
       case("name")
          if(trim(attributes(i)%val) .ne. trim(this%name)) then
             call print_warning( &
                  'Leaky ReLU activation: name attribute "' // &
                  trim(attributes(i)%val) // &
                  '" does not match expected "' // trim(this%name) // '"' &
             )
          end if
       case default
          call print_warning( &
               'Leaky ReLU activation: unknown attribute ' // trim(attributes(i)%name) &
          )
       end select
    end do

  end subroutine apply_attributes_leaky_relu
!###############################################################################
```
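A sketch of supplying attributes by hand. It assumes `onnx_attribute_type` can be constructed positionally as `(name, type, val)`, matching the calls in `export_attributes_leaky_relu` in the next block; the program itself is hypothetical:

```fortran
program apply_onnx_attributes
  use athena__misc_types, only: onnx_attribute_type
  use athena__activation_leaky_relu, only: leaky_relu_actv_type
  implicit none

  type(leaky_relu_actv_type) :: actv
  type(onnx_attribute_type) :: attrs(2)

  ! Values are strings; apply_attributes parses them with list-directed reads
  attrs(1) = onnx_attribute_type("alpha", "float", "0.2")
  attrs(2) = onnx_attribute_type("scale", "float", "1.0")

  ! Unknown attribute names only trigger a warning, so extras are non-fatal
  actv = leaky_relu_actv_type(attributes = attrs)
end program apply_onnx_attributes
```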
```fortran
!###############################################################################
  pure function export_attributes_leaky_relu(this) result(attributes)
    !! Export leaky ReLU activation function attributes as ONNX attributes
    implicit none

    ! Arguments
    class(leaky_relu_actv_type), intent(in) :: this
    !! Leaky ReLU activation type
    type(onnx_attribute_type), allocatable, dimension(:) :: attributes
    !! Array of ONNX attributes

    ! Local variables
    character(50) :: buffer
    !! Temporary string buffer

    allocate(attributes(3))

    write(buffer, '(A)') this%name
    attributes(1) = onnx_attribute_type( &
         "name", "string", trim(adjustl(buffer)) )

    write(buffer, '(F10.6)') this%scale
    attributes(2) = onnx_attribute_type( &
         "scale", "float", trim(adjustl(buffer)) )

    write(buffer, '(F10.6)') this%alpha
    attributes(3) = onnx_attribute_type( &
         "alpha", "float", trim(adjustl(buffer)) )

  end function export_attributes_leaky_relu
!###############################################################################
```
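Since the three exported names (`name`, `scale`, `alpha`) are exactly the ones `apply_attributes_leaky_relu` recognises, export followed by `create_from_onnx_leaky_relu_activation` should round-trip. A hypothetical sketch, under the same assumptions as the previous examples:

```fortran
program onnx_round_trip
  use, intrinsic :: iso_fortran_env, only: real32
  use athena__misc_types, only: base_actv_type, onnx_attribute_type
  use athena__activation_leaky_relu, only: &
       leaky_relu_actv_type, create_from_onnx_leaky_relu_activation
  implicit none

  type(leaky_relu_actv_type) :: original
  class(base_actv_type), allocatable :: restored
  type(onnx_attribute_type), allocatable :: attrs(:)

  original = leaky_relu_actv_type(scale = 2.0_real32)
  original%alpha = 0.2_real32

  ! name, scale and alpha are serialised as strings ...
  attrs = original%export_attributes()

  ! ... and parsed back into a fresh polymorphic activation
  restored = create_from_onnx_leaky_relu_activation(attrs)
end program onnx_round_trip
```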
```fortran
!###############################################################################
  function leaky_relu_activate(this, val) result(output)
    !! Apply leaky ReLU activation to 1D array
    !!
    !! Computes: \( f = \max(\alpha x, x) \), which, for \( 0 < \alpha < 1 \),
    !! equals the piecewise definition in the module header
    implicit none

    ! Arguments
    class(leaky_relu_actv_type), intent(in) :: this
    !! Leaky ReLU activation type
    type(array_type), intent(in) :: val
    !! Input values
    type(array_type), pointer :: output
    !! Activated output values

    if(this%apply_scaling)then
       output => max(val * this%alpha, val) * this%scale
    else
       output => max(val * this%alpha, val)
    end if

  end function leaky_relu_activate
!###############################################################################
```
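The backward pass itself is delegated to the pointer graph built by `diffstruc`'s `max` and `operator(*)`, but the gradient it should reproduce is the derivative documented in the module header: 1 for \(x > 0\) and \(\alpha\) otherwise. A standalone check of that rule in plain Fortran:

```fortran
program leaky_relu_grad_demo
  !! Standalone check of the documented derivative: 1 for x > 0, alpha otherwise.
  use, intrinsic :: iso_fortran_env, only: real32
  implicit none

  real(real32), parameter :: alpha = 0.01_real32
  real(real32) :: x(4), dfdx(4)

  x = [ -2.0_real32, -0.5_real32, 0.0_real32, 3.0_real32 ]
  dfdx = merge(1.0_real32, alpha, x > 0.0_real32)

  print '(4F10.4)', dfdx   ! expected: 0.0100  0.0100  0.0100  1.0000
end program leaky_relu_grad_demo
```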
```fortran
end module athena__activation_leaky_relu
```