XCSF  1.4.7
XCSF learning classifier system
neural_activations.h
/*
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#pragma once

#include <math.h>

#define LOGISTIC (0)
#define RELU (1)
#define TANH (2)
#define LINEAR (3)
#define GAUSSIAN (4)
#define SIN (5)
#define COS (6)
#define SOFT_PLUS (7)
#define LEAKY (8)
#define SELU (9)
#define LOGGY (10)
#define NUM_ACTIVATIONS (11)
#define SOFT_MAX (100)
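/* NUM_ACTIVATIONS counts only the scalar activations numbered 0 to 10 above;
 * SOFT_MAX is assigned the out-of-range value 100, presumably because softmax
 * operates on a whole vector and is handled separately from the scalar set. */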

#define STRING_LOGISTIC ("logistic\0")
#define STRING_RELU ("relu\0")
#define STRING_TANH ("tanh\0")
#define STRING_LINEAR ("linear\0")
#define STRING_GAUSSIAN ("gaussian\0")
#define STRING_SIN ("sin\0")
#define STRING_COS ("cos\0")
#define STRING_SOFT_PLUS ("softplus\0")
#define STRING_LEAKY ("leaky\0")
#define STRING_SELU ("selu\0")
#define STRING_LOGGY ("loggy\0")
#define STRING_SOFT_MAX ("softmax\0")

/** @brief Returns the result from applying a specified activation function. */
double
neural_activate(const int a, const double x);

/** @brief Returns the derivative from applying a specified activation function. */
double
neural_gradient(const int a, const double x);

/** @brief Returns the name of a specified activation function. */
const char *
neural_activation_string(const int a);

/** @brief Returns the integer representation of an activation function. */
int
neural_activation_as_int(const char *a);

/** @brief Applies an activation function to a vector of neuron states. */
void
neural_activate_array(double *state, double *output, const int n, const int a);

/** @brief Applies a gradient function to a vector of neuron states. */
void
neural_gradient_array(const double *state, double *delta, const int n,
                      const int a);

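/*
 * Usage sketch (illustrative only, assuming this header is linked against its
 * implementation): converting between activation names and integer IDs, and
 * applying an activation and its gradient to a scalar input.
 *
 *   int a = neural_activation_as_int("relu");        // -> RELU
 *   const char *name = neural_activation_string(a);  // -> "relu"
 *   double y = neural_activate(a, -0.5);             // -> 0 (relu clips negatives)
 *   double dy = neural_gradient(a, -0.5);            // -> 0
 */
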
static inline double
logistic_activate(const double x)
{
    return 1. / (1. + exp(-x));
}

static inline double
logistic_gradient(const double x)
{
    double fx = 1. / (1. + exp(-x));
    return (1 - fx) * fx;
}

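/* Loggy: the logistic function rescaled to the range (-1, 1). */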
static inline double
loggy_activate(const double x)
{
    return 2. / (1. + exp(-x)) - 1;
}

static inline double
loggy_gradient(const double x)
{
    double fx = exp(x);
    return (2 * fx) / ((fx + 1) * (fx + 1));
}

static inline double
gaussian_activate(const double x)
{
    return exp(-x * x);
}

static inline double
gaussian_gradient(const double x)
{
    return -2 * x * exp(-x * x);
}

static inline double
relu_activate(const double x)
{
    return x * (x > 0);
}

static inline double
relu_gradient(const double x)
{
    return (x > 0);
}

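/* SELU: scaled exponential linear unit with fixed constants lambda ~= 1.0507
 * and alpha ~= 1.6732. */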
static inline double
selu_activate(const double x)
{
    return (x >= 0) * 1.0507 * x + (x < 0) * 1.0507 * 1.6732 * expm1(x);
}

static inline double
selu_gradient(const double x)
{
    return (x >= 0) * 1.0507 + (x < 0) * (1.0507 * 1.6732 * exp(x));
}

static inline double
linear_activate(const double x)
{
    return x;
}

static inline double
linear_gradient(const double x)
{
    (void) x;
    return 1;
}

static inline double
soft_plus_activate(const double x)
{
    return log1p(exp(x));
}

static inline double
soft_plus_gradient(const double x)
{
    return 1. / (1. + exp(-x));
}

static inline double
tanh_activate(const double x)
{
    return tanh(x);
}

static inline double
tanh_gradient(const double x)
{
    double t = tanh(x);
    return 1 - t * t;
}

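/* Leaky ReLU with a fixed negative slope of 0.1. */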
static inline double
leaky_activate(const double x)
{
    return (x > 0) ? x : .1 * x;
}

static inline double
leaky_gradient(const double x)
{
    return (x < 0) ? .1 : 1;
}

static inline double
sin_activate(const double x)
{
    return sin(x);
}

static inline double
sin_gradient(const double x)
{
    return cos(x);
}

static inline double
cos_activate(const double x)
{
    return cos(x);
}

static inline double
cos_gradient(const double x)
{
    return -sin(x);
}
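
/*
 * Example: a minimal sketch checking the scalar gradients against a central
 * finite difference of the corresponding activation, assuming the translation
 * unit is compiled and linked with the implementations of neural_activate()
 * and neural_gradient().
 *
 *   #include <stdio.h>
 *
 *   int
 *   main(void)
 *   {
 *       const int acts[] = { LOGISTIC, RELU, TANH, GAUSSIAN, SELU, LEAKY };
 *       const double h = 1e-6;
 *       const double x = 0.3;
 *       for (int i = 0; i < 6; ++i) {
 *           const double numeric =
 *               (neural_activate(acts[i], x + h) -
 *                neural_activate(acts[i], x - h)) / (2 * h);
 *           const double analytic = neural_gradient(acts[i], x);
 *           printf("%s: analytic=%f numeric=%f\n",
 *                  neural_activation_string(acts[i]), analytic, numeric);
 *       }
 *       return 0;
 *   }
 */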