| Line | Branch | Exec | Source |
|---|---|---|---|
module athena__activation_relu
  !! Module containing implementation of the ReLU activation function
  !!
  !! This module implements the Rectified Linear Unit (ReLU) activation,
  !! which zeroes negative values while preserving positive values.
  !!
  !! Mathematical operation:
  !! \[ f(x) = \max(0, x) = \begin{cases} x & \text{if } x > 0 \\ 0 & \text{if } x \leq 0 \end{cases} \]
  !!
  !! Derivative:
  !! \[ f'(x) = \begin{cases} 1 & \text{if } x > 0 \\ 0 & \text{if } x \leq 0 \end{cases} \]
  !!
  !! Properties: Non-saturating, sparse activation, mitigates vanishing gradients
  !! Reference: Nair & Hinton (2010), ICML
  use coreutils, only: real32, print_warning
  use diffstruc, only: array_type, operator(*), max
  use athena__misc_types, only: base_actv_type
  use athena__misc_types, only: onnx_attribute_type
  implicit none


  private

  public :: relu_actv_type, create_from_onnx_relu_activation


  type, extends(base_actv_type) :: relu_actv_type
    !! Type for ReLU activation function with overloaded procedures
    !! (apply/reset and ONNX attribute import/export are bound below).
  contains
    procedure, pass(this) :: apply => apply_relu
    procedure, pass(this) :: reset => reset_relu
    procedure, pass(this) :: apply_attributes => apply_attributes_relu
    procedure, pass(this) :: export_attributes => export_attributes_relu
  end type relu_actv_type

  ! Structure-constructor overload so callers can write
  ! relu_actv_type(scale=..., attributes=...) directly.
  interface relu_actv_type
    procedure initialise
  end interface relu_actv_type



contains
| 43 | |||
| 44 | !############################################################################### | ||
| 45 |
2/4✓ Branch 0 taken 7 times.
✗ Branch 1 not taken.
✓ Branch 2 taken 7 times.
✗ Branch 3 not taken.
|
41 | function initialise(scale, attributes) result(activation) |
| 46 | !! Initialise a ReLU activation function | ||
| 47 | implicit none | ||
| 48 | |||
| 49 | ! Arguments | ||
| 50 | real(real32), optional, intent(in) :: scale | ||
| 51 | !! Optional scale factor for activation output | ||
| 52 | type(relu_actv_type) :: activation | ||
| 53 | !! ReLU activation type | ||
| 54 | type(onnx_attribute_type), optional, intent(in) :: attributes(:) | ||
| 55 | !! Optional ONNX attributes | ||
| 56 | |||
| 57 | |||
| 58 | 41 | call activation%reset() | |
| 59 | |||
| 60 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 41 times.
|
41 | if(present(scale)) activation%scale = scale |
| 61 |
1/2✗ Branch 0 not taken.
✓ Branch 1 taken 41 times.
|
41 | if(abs(activation%scale-1._real32) .gt. 1.e-6_real32)then |
| 62 | ✗ | activation%apply_scaling = .true. | |
| 63 | end if | ||
| 64 |
2/2✓ Branch 0 taken 7 times.
✓ Branch 1 taken 34 times.
|
41 | if(present(attributes))then |
| 65 |
4/8✓ Branch 0 taken 7 times.
✗ Branch 1 not taken.
✗ Branch 2 not taken.
✓ Branch 3 taken 7 times.
✗ Branch 5 not taken.
✓ Branch 6 taken 7 times.
✗ Branch 8 not taken.
✓ Branch 9 taken 7 times.
|
7 | call activation%apply_attributes(attributes) |
| 66 | end if | ||
| 67 | |||
| 68 |
2/2✓ Branch 0 taken 7 times.
✓ Branch 1 taken 34 times.
|
82 | end function initialise |
| 69 | !------------------------------------------------------------------------------- | ||
pure subroutine reset_relu(this)
  !! Restore the ReLU activation to its default configuration.
  implicit none

  ! Arguments
  class(relu_actv_type), intent(inout) :: this
  !! ReLU activation to reset

  ! Defaults: identity scaling, rectification at zero.
  this%name          = "relu"
  this%scale         = 1._real32
  this%threshold     = 0._real32
  this%apply_scaling = .false.

end subroutine reset_relu
| 84 | !------------------------------------------------------------------------------- | ||
| 85 |
1/2✓ Branch 0 taken 7 times.
✗ Branch 1 not taken.
|
7 | function create_from_onnx_relu_activation(attributes) result(activation) |
| 86 | !! Create ReLU activation function from ONNX attributes | ||
| 87 | implicit none | ||
| 88 | |||
| 89 | ! Arguments | ||
| 90 | type(onnx_attribute_type), dimension(:), intent(in) :: attributes | ||
| 91 | !! Array of ONNX attributes | ||
| 92 | |||
| 93 | class(base_actv_type), allocatable :: activation | ||
| 94 | !! Instance of activation type | ||
| 95 | |||
| 96 |
5/10✗ Branch 0 not taken.
✓ Branch 1 taken 7 times.
✗ Branch 3 not taken.
✓ Branch 4 taken 7 times.
✗ Branch 6 not taken.
✓ Branch 7 taken 7 times.
✗ Branch 10 not taken.
✓ Branch 11 taken 7 times.
✗ Branch 13 not taken.
✓ Branch 14 taken 7 times.
|
7 | allocate(activation, source = relu_actv_type(attributes = attributes)) |
| 97 | |||
| 98 | 7 | end function create_from_onnx_relu_activation | |
| 99 | !############################################################################### | ||
| 100 | |||
| 101 | |||
| 102 | !############################################################################### | ||
| 103 |
1/2✓ Branch 0 taken 9 times.
✗ Branch 1 not taken.
|
9 | subroutine apply_attributes_relu(this, attributes) |
| 104 | !! Load ONNX attributes into ReLU activation function | ||
| 105 | implicit none | ||
| 106 | |||
| 107 | ! Arguments | ||
| 108 | class(relu_actv_type), intent(inout) :: this | ||
| 109 | !! ReLU activation type | ||
| 110 | type(onnx_attribute_type), dimension(:), intent(in) :: attributes | ||
| 111 | !! Array of ONNX attributes | ||
| 112 | |||
| 113 | ! Local variables | ||
| 114 | integer :: i | ||
| 115 | !! Loop variable | ||
| 116 | type(onnx_attribute_type) :: attribute | ||
| 117 | !! Temporary attribute holder | ||
| 118 | character(20), allocatable, dimension(:) :: attribute_names | ||
| 119 | |||
| 120 | ! Load provided attributes | ||
| 121 |
5/8✗ Branch 0 not taken.
✓ Branch 1 taken 9 times.
✗ Branch 3 not taken.
✓ Branch 4 taken 9 times.
✗ Branch 6 not taken.
✓ Branch 7 taken 9 times.
✓ Branch 9 taken 23 times.
✓ Branch 10 taken 9 times.
|
32 | do i=1, size(attributes,dim=1) |
| 122 |
2/4✗ Branch 0 not taken.
✓ Branch 1 taken 23 times.
✗ Branch 3 not taken.
✓ Branch 4 taken 23 times.
|
55 | select case(trim(attributes(i)%name)) |
| 123 | case("scale") | ||
| 124 |
2/4✗ Branch 0 not taken.
✓ Branch 1 taken 9 times.
✗ Branch 3 not taken.
✓ Branch 4 taken 9 times.
|
9 | read(attributes(i)%val,*) this%scale |
| 125 |
2/2✓ Branch 0 taken 1 times.
✓ Branch 1 taken 8 times.
|
18 | if(abs(this%scale-1._real32) .gt. 1.e-6_real32)then |
| 126 | 1 | this%apply_scaling = .true. | |
| 127 | else | ||
| 128 | 8 | this%apply_scaling = .false. | |
| 129 | end if | ||
| 130 | case("threshold") | ||
| 131 |
2/4✗ Branch 0 not taken.
✓ Branch 1 taken 7 times.
✗ Branch 3 not taken.
✓ Branch 4 taken 7 times.
|
7 | read(attributes(i)%val,*) this%threshold |
| 132 | case("name") | ||
| 133 |
5/10✗ Branch 0 not taken.
✓ Branch 1 taken 7 times.
✗ Branch 3 not taken.
✓ Branch 4 taken 7 times.
✓ Branch 8 taken 7 times.
✗ Branch 9 not taken.
✓ Branch 10 taken 7 times.
✗ Branch 11 not taken.
✗ Branch 12 not taken.
✓ Branch 13 taken 7 times.
|
7 | if(trim(attributes(i)%val) .ne. trim(this%name))then |
| 134 | call print_warning( & | ||
| 135 | 'ReLU activation: name attribute "' // & | ||
| 136 | ✗ | trim(attributes(i)%val) // & | |
| 137 | '"" does not match expected "' // trim(this%name)//'"' & | ||
| 138 | ✗ | ) | |
| 139 | |||
| 140 | end if | ||
| 141 | case default | ||
| 142 | call print_warning( & | ||
| 143 | ✗ | 'ReLU activation: unknown attribute '//trim(attributes(i)%name) & | |
| 144 |
4/10✓ Branch 0 taken 23 times.
✗ Branch 1 not taken.
✓ Branch 2 taken 9 times.
✓ Branch 3 taken 7 times.
✓ Branch 4 taken 7 times.
✗ Branch 5 not taken.
✗ Branch 6 not taken.
✗ Branch 7 not taken.
✗ Branch 10 not taken.
✗ Branch 11 not taken.
|
46 | ) |
| 145 | end select | ||
| 146 | end do | ||
| 147 | |||
| 148 | 9 | end subroutine apply_attributes_relu | |
| 149 | !############################################################################### | ||
| 150 | |||
| 151 | |||
| 152 | !############################################################################### | ||
pure function export_attributes_relu(this) result(attributes)
  !! Export ReLU activation function attributes as ONNX attributes.
  !!
  !! Produces three attributes: "name" (string), "scale" (float) and
  !! "threshold" (float), with numeric values formatted as F10.6.
  implicit none

  ! Arguments
  class(relu_actv_type), intent(in) :: this
  !! ReLU activation type
  type(onnx_attribute_type), allocatable, dimension(:) :: attributes
  !! Array of ONNX attributes

  ! Local variables
  character(50) :: buffer
  !! Scratch buffer for value-to-string conversion

  allocate(attributes(3))

  ! Name exported verbatim as a string attribute.
  write(buffer, '(A)') this%name
  attributes(1) = onnx_attribute_type("name", "string", trim(adjustl(buffer)))

  ! Numeric attributes exported as fixed-point floats.
  write(buffer, '(F10.6)') this%scale
  attributes(2) = onnx_attribute_type("scale", "float", trim(adjustl(buffer)))

  write(buffer, '(F10.6)') this%threshold
  attributes(3) = onnx_attribute_type("threshold", "float", trim(adjustl(buffer)))

end function export_attributes_relu
| 182 | !############################################################################### | ||
| 183 | |||
| 184 | |||
| 185 | !############################################################################### | ||
function apply_relu(this, val) result(output)
  !! Apply ReLU activation to 1D array
  !!
  !! Computes: f = max(threshold, x), optionally multiplied by scale
  !! (threshold defaults to 0, giving the classic max(0, x)).
  implicit none

  ! Arguments
  class(relu_actv_type), intent(in) :: this
  !! ReLU activation type
  type(array_type), intent(in) :: val
  !! Input values
  type(array_type), pointer :: output
  !! Activated output values

  ! Local variables
  type(array_type), pointer :: rectified
  !! Thresholded values prior to any output scaling

  ! Rectify once, then apply the optional scale factor on top.
  rectified => max(val, this%threshold)
  if(this%apply_scaling)then
     output => rectified * this%scale
  else
     output => rectified
  end if

end function apply_relu
| 206 | !############################################################################### | ||
| 207 | |||
| 208 | 237 | end module athena__activation_relu | |
| 209 |