```fortran
module athena__activation_relu
  !! Module containing implementation of the ReLU activation function
  !!
  !! This module implements the Rectified Linear Unit (ReLU) activation,
  !! which zeroes negative values while preserving positive values.
  !!
  !! Mathematical operation:
  !! \[ f(x) = \max(0, x) = \begin{cases} x & \text{if } x > 0 \\ 0 & \text{if } x \leq 0 \end{cases} \]
  !!
  !! Derivative:
  !! \[ f'(x) = \begin{cases} 1 & \text{if } x > 0 \\ 0 & \text{if } x \leq 0 \end{cases} \]
  !!
  !! Properties: non-saturating, sparse activation, mitigates vanishing gradients
  !! Reference: Nair & Hinton (2010), ICML
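  !!
  !! Implementation note: apply() computes scale * max(x, threshold); with
  !! the defaults scale = 1 and threshold = 0 this reduces to max(0, x).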
  use coreutils, only: real32, print_warning
  use diffstruc, only: array_type, operator(*), max
  use athena__misc_types, only: base_actv_type, onnx_attribute_type
  implicit none


  private

  public :: relu_actv_type, create_from_onnx_relu_activation


  type, extends(base_actv_type) :: relu_actv_type
    !! Type for ReLU activation function with overloaded procedures
  contains
    procedure, pass(this) :: apply => apply_relu
    procedure, pass(this) :: reset => reset_relu
    procedure, pass(this) :: apply_attributes => apply_attributes_relu
    procedure, pass(this) :: export_attributes => export_attributes_relu
  end type relu_actv_type

  interface relu_actv_type
    procedure initialise
  end interface relu_actv_type



contains

!###############################################################################
  function initialise(scale, attributes) result(activation)
    !! Initialise a ReLU activation function
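    !!
    !! Example: `act = relu_actv_type(scale = 2._real32)` enables output
    !! scaling, since |scale - 1| exceeds the 1e-6 tolerance.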
    implicit none

    ! Arguments
    real(real32), optional, intent(in) :: scale
    !! Optional scale factor for activation output
    type(onnx_attribute_type), optional, intent(in) :: attributes(:)
    !! Optional ONNX attributes
    type(relu_actv_type) :: activation
    !! ReLU activation type


    call activation%reset()

    if(present(scale)) activation%scale = scale
    if(abs(activation%scale - 1._real32) .gt. 1.e-6_real32)then
      activation%apply_scaling = .true.
    end if
    if(present(attributes))then
      call activation%apply_attributes(attributes)
    end if

  end function initialise
!-------------------------------------------------------------------------------
  pure subroutine reset_relu(this)
    !! Reset ReLU activation function attributes and variables
    implicit none

    ! Arguments
    class(relu_actv_type), intent(inout) :: this
    !! ReLU activation type

    this%name = "relu"
    this%scale = 1._real32
    this%threshold = 0._real32
    this%apply_scaling = .false.

  end subroutine reset_relu
!-------------------------------------------------------------------------------
  function create_from_onnx_relu_activation(attributes) result(activation)
    !! Create ReLU activation function from ONNX attributes
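    !!
    !! Example:
    !!   activation = create_from_onnx_relu_activation( &
    !!        [ onnx_attribute_type("scale", "float", "2.0") ] )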
    implicit none

    ! Arguments
    type(onnx_attribute_type), dimension(:), intent(in) :: attributes
    !! Array of ONNX attributes

    class(base_actv_type), allocatable :: activation
    !! Instance of activation type

    allocate(activation, source = relu_actv_type(attributes = attributes))

  end function create_from_onnx_relu_activation
!###############################################################################


!###############################################################################
  subroutine apply_attributes_relu(this, attributes)
    !! Load ONNX attributes into ReLU activation function
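    !!
    !! Recognised attribute names are "scale", "threshold" and "name"; any
    !! other name is reported via print_warning.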
    implicit none

    ! Arguments
    class(relu_actv_type), intent(inout) :: this
    !! ReLU activation type
    type(onnx_attribute_type), dimension(:), intent(in) :: attributes
    !! Array of ONNX attributes

    ! Local variables
    integer :: i
    !! Loop variable

    ! Load provided attributes
    do i = 1, size(attributes, dim=1)
      select case(trim(attributes(i)%name))
      case("scale")
        read(attributes(i)%val, *) this%scale
        if(abs(this%scale - 1._real32) .gt. 1.e-6_real32)then
          this%apply_scaling = .true.
        else
          this%apply_scaling = .false.
        end if
      case("threshold")
        read(attributes(i)%val, *) this%threshold
      case("name")
        if(trim(attributes(i)%val) .ne. trim(this%name))then
          call print_warning( &
               'ReLU activation: name attribute "' // &
               trim(attributes(i)%val) // &
               '" does not match expected "' // trim(this%name) // '"' &
          )
        end if
      case default
        call print_warning( &
             'ReLU activation: unknown attribute ' // trim(attributes(i)%name) &
        )
      end select
    end do

  end subroutine apply_attributes_relu
!###############################################################################


!###############################################################################
  pure function export_attributes_relu(this) result(attributes)
    !! Export ReLU activation function attributes as ONNX attributes
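    !!
    !! Example output for a default instance:
    !!   ("name", "string", "relu"), ("scale", "float", "1.000000"),
    !!   ("threshold", "float", "0.000000")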
    implicit none

    ! Arguments
    class(relu_actv_type), intent(in) :: this
    !! ReLU activation type
    type(onnx_attribute_type), allocatable, dimension(:) :: attributes
    !! Array of ONNX attributes

    ! Local variables
    character(50) :: buffer
    !! Temporary string buffer

    allocate(attributes(3))

    write(buffer, '(A)') this%name
    attributes(1) = onnx_attribute_type( &
         "name", "string", trim(adjustl(buffer)) )

    write(buffer, '(F10.6)') this%scale
    attributes(2) = onnx_attribute_type( &
         "scale", "float", trim(adjustl(buffer)) )

    write(buffer, '(F10.6)') this%threshold
    attributes(3) = onnx_attribute_type( &
         "threshold", "float", trim(adjustl(buffer)) )

  end function export_attributes_relu
!###############################################################################


!###############################################################################
  function apply_relu(this, val) result(output)
    !! Apply ReLU activation to 1D array
    !!
    !! Computes: f = scale * max(x, threshold), which reduces to
    !! f = max(0, x) with the default scale = 1 and threshold = 0
    implicit none

    ! Arguments
    class(relu_actv_type), intent(in) :: this
    !! ReLU activation type
    type(array_type), intent(in) :: val
    !! Input values
    type(array_type), pointer :: output
    !! Activated output values

    if(this%apply_scaling)then
      output => max(val, this%threshold) * this%scale
    else
      output => max(val, this%threshold)
    end if

  end function apply_relu
!###############################################################################

end module athena__activation_relu
```
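For reference, a minimal sketch of how the module's public interface might be driven from client code. It exercises only the constructor, the ONNX factory and the attribute round-trip; constructing a diffstruc `array_type` input for `apply` is outside the scope of this file, the program name is illustrative, and only the `name` and `val` fields already used above are assumed on `onnx_attribute_type`.

```fortran
program relu_example
  !! Illustrative driver (not part of the library): exercises the public
  !! interface of athena__activation_relu
  use coreutils, only: real32
  use athena__misc_types, only: base_actv_type, onnx_attribute_type
  use athena__activation_relu, only: relu_actv_type, &
       create_from_onnx_relu_activation
  implicit none

  type(relu_actv_type) :: act
  class(base_actv_type), allocatable :: act_onnx
  type(onnx_attribute_type), allocatable :: attrs(:)
  integer :: i

  ! Direct construction; |2 - 1| > 1e-6, so output scaling is enabled
  act = relu_actv_type(scale = 2._real32)

  ! Equivalent construction from ONNX attributes
  act_onnx = create_from_onnx_relu_activation( &
       [ onnx_attribute_type("scale", "float", "2.0") ] )

  ! Round-trip: export the attributes and print name/value pairs
  attrs = act%export_attributes()
  do i = 1, size(attrs)
    write(*,*) trim(attrs(i)%name), ' = ', trim(attrs(i)%val)
  end do
end program relu_example
```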