XCSF 1.4.8
XCSF learning classifier system
neural_activations.h
/*
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file neural_activations.h
 * @brief Neural network activation functions.
 */

#pragma once

#include <math.h>

/*
 * Integer identifiers for the available activation functions.
 * NUM_ACTIVATIONS counts the standard activations; SOFT_MAX sits outside
 * that range and is handled as a special case.
 */
#define LOGISTIC (0)
#define RELU (1)
#define TANH (2)
#define LINEAR (3)
#define GAUSSIAN (4)
#define SIN (5)
#define COS (6)
#define SOFT_PLUS (7)
#define LEAKY (8)
#define SELU (9)
#define LOGGY (10)
#define NUM_ACTIVATIONS (11)
#define SOFT_MAX (100)

/* Human-readable names for the activation functions. String literals are
 * implicitly NUL-terminated, so no explicit \0 is needed. */
#define STRING_LOGISTIC ("logistic")
#define STRING_RELU ("relu")
#define STRING_TANH ("tanh")
#define STRING_LINEAR ("linear")
#define STRING_GAUSSIAN ("gaussian")
#define STRING_SIN ("sin")
#define STRING_COS ("cos")
#define STRING_SOFT_PLUS ("softplus")
#define STRING_LEAKY ("leaky")
#define STRING_SELU ("selu")
#define STRING_LOGGY ("loggy")
#define STRING_SOFT_MAX ("softmax")

/**
 * Returns the result from applying a specified activation function.
 */
double
neural_activate(const int a, const double x);

/**
 * Returns the derivative from applying a specified activation function.
 */
double
neural_gradient(const int a, const double x);

/**
 * Returns the name of a specified activation function.
 */
const char *
neural_activation_string(const int a);

/**
 * Returns the integer representation of an activation function.
 */
int
neural_activation_as_int(const char *a);

/**
 * Applies an activation function to a vector of neuron states.
 */
void
neural_activate_array(double *state, double *output, const int n, const int a);

/**
 * Applies a gradient function to a vector of neuron states.
 */
void
neural_gradient_array(const double *state, double *delta, const int n,
                      const int a);

/* Logistic (sigmoid): f(x) = 1 / (1 + e^-x) */
static inline double
logistic_activate(const double x)
{
    return 1. / (1. + exp(-x));
}

/* Logistic gradient: f'(x) = f(x) * (1 - f(x)) */
static inline double
logistic_gradient(const double x)
{
    const double fx = 1. / (1. + exp(-x));
    return (1 - fx) * fx;
}

/* Loggy: logistic rescaled to (-1, 1): f(x) = 2 / (1 + e^-x) - 1 */
static inline double
loggy_activate(const double x)
{
    return 2. / (1. + exp(-x)) - 1;
}

/* Loggy gradient: f'(x) = 2 * e^x / (e^x + 1)^2 */
static inline double
loggy_gradient(const double x)
{
    const double fx = exp(x);
    return (2 * fx) / ((fx + 1) * (fx + 1));
}

/* Gaussian: f(x) = e^(-x^2) */
static inline double
gaussian_activate(const double x)
{
    return exp(-x * x);
}

/* Gaussian gradient: f'(x) = -2x * e^(-x^2) */
static inline double
gaussian_gradient(const double x)
{
    return -2 * x * exp(-x * x);
}

/* Rectified linear unit: f(x) = max(0, x) */
static inline double
relu_activate(const double x)
{
    return x * (x > 0);
}

/* ReLU gradient: f'(x) = 1 for x > 0, else 0 */
static inline double
relu_gradient(const double x)
{
    return (x > 0);
}

/* Scaled exponential linear unit (SELU), with lambda = 1.0507, alpha = 1.6732:
 * f(x) = lambda * x for x >= 0, else lambda * alpha * (e^x - 1) */
static inline double
selu_activate(const double x)
{
    return (x >= 0) * 1.0507 * x + (x < 0) * 1.0507 * 1.6732 * expm1(x);
}

/* SELU gradient: f'(x) = lambda for x >= 0, else lambda * alpha * e^x */
static inline double
selu_gradient(const double x)
{
    return (x >= 0) * 1.0507 + (x < 0) * (1.0507 * 1.6732 * exp(x));
}

/* Linear (identity): f(x) = x */
static inline double
linear_activate(const double x)
{
    return x;
}

/* Linear gradient: f'(x) = 1 */
static inline double
linear_gradient(const double x)
{
    (void) x;
    return 1;
}

/* Soft plus: f(x) = ln(1 + e^x) */
static inline double
soft_plus_activate(const double x)
{
    return log1p(exp(x));
}

/* Soft plus gradient: f'(x) = 1 / (1 + e^-x), i.e., the logistic function */
static inline double
soft_plus_gradient(const double x)
{
    return 1. / (1. + exp(-x));
}

/* Hyperbolic tangent: f(x) = tanh(x) */
static inline double
tanh_activate(const double x)
{
    return tanh(x);
}

/* Tanh gradient: f'(x) = 1 - tanh(x)^2 */
static inline double
tanh_gradient(const double x)
{
    const double t = tanh(x);
    return 1 - t * t;
}

/* Leaky ReLU: f(x) = x for x > 0, else 0.1 * x */
static inline double
leaky_activate(const double x)
{
    return (x > 0) ? x : .1 * x;
}

/* Leaky ReLU gradient: f'(x) = 0.1 for x < 0, else 1 */
static inline double
leaky_gradient(const double x)
{
    return (x < 0) ? .1 : 1;
}

/* Sine: f(x) = sin(x) */
static inline double
sin_activate(const double x)
{
    return sin(x);
}

/* Sine gradient: f'(x) = cos(x) */
static inline double
sin_gradient(const double x)
{
    return cos(x);
}

/* Cosine: f(x) = cos(x) */
static inline double
cos_activate(const double x)
{
    return cos(x);
}

/* Cosine gradient: f'(x) = -sin(x) */
static inline double
cos_gradient(const double x)
{
    return -sin(x);
}
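
This header only declares neural_activate() and neural_gradient(); their definitions live in the corresponding source file, which is not shown here. A minimal sketch of a dispatcher consistent with the constants and inline helpers above might look as follows; example_activate is a hypothetical name used for illustration, not part of the XCSF API:

#include <stdio.h>

#include "neural_activations.h"

/* Hypothetical dispatcher: maps an activation constant to its inline helper. */
static double
example_activate(const int a, const double x)
{
    switch (a) {
        case LOGISTIC: return logistic_activate(x);
        case RELU: return relu_activate(x);
        case TANH: return tanh_activate(x);
        case LINEAR: return linear_activate(x);
        case GAUSSIAN: return gaussian_activate(x);
        case SIN: return sin_activate(x);
        case COS: return cos_activate(x);
        case SOFT_PLUS: return soft_plus_activate(x);
        case LEAKY: return leaky_activate(x);
        case SELU: return selu_activate(x);
        case LOGGY: return loggy_activate(x);
        default:
            printf("example_activate(): invalid activation: %d\n", a);
            return 0;
    }
}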
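
A short usage sketch of the scalar API declared above, assuming the program is linked against the XCSF sources that define these functions: a central finite difference confirms that neural_gradient() matches the slope of neural_activate() for each selectable activation (the sample points deliberately avoid the ReLU/leaky kink at zero):

#include <assert.h>
#include <math.h>
#include <stdio.h>

#include "neural_activations.h"

int
main(void)
{
    const double eps = 1e-6;
    const double points[] = { -2, -0.5, 0.1, 1.5 };
    for (int a = 0; a < NUM_ACTIVATIONS; ++a) {
        for (int i = 0; i < 4; ++i) {
            const double x = points[i];
            const double grad = neural_gradient(a, x);
            /* central difference approximation of the derivative */
            const double approx = (neural_activate(a, x + eps) -
                                   neural_activate(a, x - eps)) / (2 * eps);
            assert(fabs(grad - approx) < 1e-4);
        }
        printf("%s: gradient check passed\n", neural_activation_string(a));
    }
    return 0;
}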