XCSF 1.4.8
XCSF learning classifier system
neural_activations.c
/*
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file neural_activations.c
 * @brief Neural network activation functions.
 */

#include "neural_activations.h"
#include "neural_layer.h"
#include "utils.h"

/**
 * @brief Returns the result from applying a specified activation function.
 */
double
neural_activate(const int a, const double x)
{
    switch (a) {
        case LOGISTIC:
            return logistic_activate(x);
        case RELU:
            return relu_activate(x);
        case GAUSSIAN:
            return gaussian_activate(x);
        case TANH:
            return tanh_activate(x);
        case SIN:
            return sin_activate(x);
        case COS:
            return cos_activate(x);
        case SOFT_PLUS:
            return soft_plus_activate(x);
        case LINEAR:
            return linear_activate(x);
        case LEAKY:
            return leaky_activate(x);
        case SELU:
            return selu_activate(x);
        case LOGGY:
            return loggy_activate(x);
        default:
            printf("neural_activate(): invalid activation function: %d\n", a);
            exit(EXIT_FAILURE);
    }
}
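
/*
 * Illustrative sketch (not part of the original file): applying a few of the
 * activations selected above. The expected values in the comments follow from
 * the standard definitions of these functions; the NEURAL_ACTIVATIONS_EXAMPLE
 * guard is hypothetical and keeps the sketch out of normal builds.
 */
#ifdef NEURAL_ACTIVATIONS_EXAMPLE
#include <stdio.h>

static void
example_activate(void)
{
    printf("%f\n", neural_activate(LOGISTIC, 0)); /* 0.5: logistic is centred on 0 */
    printf("%f\n", neural_activate(RELU, -2)); /* 0: RELU zeroes negative inputs */
    printf("%f\n", neural_activate(TANH, 0)); /* 0: tanh passes through the origin */
}
#endif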

/**
 * @brief Returns the derivative from applying a specified activation function.
 */
double
neural_gradient(const int a, const double x)
{
    switch (a) {
        case LOGISTIC:
            return logistic_gradient(x);
        case RELU:
            return relu_gradient(x);
        case GAUSSIAN:
            return gaussian_gradient(x);
        case TANH:
            return tanh_gradient(x);
        case SIN:
            return sin_gradient(x);
        case COS:
            return cos_gradient(x);
        case SOFT_PLUS:
            return soft_plus_gradient(x);
        case LINEAR:
            return linear_gradient(x);
        case LEAKY:
            return leaky_gradient(x);
        case SELU:
            return selu_gradient(x);
        case LOGGY:
            return loggy_gradient(x);
        default:
            printf("neural_gradient(): invalid activation function: %d\n", a);
            exit(EXIT_FAILURE);
    }
}
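
/*
 * Illustrative sketch (not part of the original file): checking an analytic
 * gradient against a central finite difference of neural_activate(). This
 * assumes the gradient functions take the same raw state value that is passed
 * to neural_activate(), as the call in neural_gradient_array() below suggests.
 * Guarded by the hypothetical NEURAL_ACTIVATIONS_EXAMPLE macro.
 */
#ifdef NEURAL_ACTIVATIONS_EXAMPLE
#include <math.h>
#include <stdio.h>

static void
example_gradient_check(const int a, const double x)
{
    const double h = 1e-6; /* step size for the central difference */
    const double numeric =
        (neural_activate(a, x + h) - neural_activate(a, x - h)) / (2 * h);
    const double analytic = neural_gradient(a, x);
    printf("numeric=%f analytic=%f diff=%g\n", numeric, analytic,
           fabs(numeric - analytic));
}
#endif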

/**
 * @brief Returns the name of a specified activation function.
 */
const char *
neural_activation_string(const int a)
{
    switch (a) {
        case LOGISTIC:
            return STRING_LOGISTIC;
        case RELU:
            return STRING_RELU;
        case GAUSSIAN:
            return STRING_GAUSSIAN;
        case TANH:
            return STRING_TANH;
        case SIN:
            return STRING_SIN;
        case COS:
            return STRING_COS;
        case SOFT_PLUS:
            return STRING_SOFT_PLUS;
        case LINEAR:
            return STRING_LINEAR;
        case LEAKY:
            return STRING_LEAKY;
        case SELU:
            return STRING_SELU;
        case LOGGY:
            return STRING_LOGGY;
        case SOFT_MAX:
            return STRING_SOFT_MAX;
        default:
            printf("neural_activation_string(): invalid activation: %d\n", a);
            exit(EXIT_FAILURE);
    }
}

/**
 * @brief Returns the integer representation of an activation function.
 */
int
neural_activation_as_int(const char *a)
{
    if (strncmp(a, STRING_LOGISTIC, 9) == 0) {
        return LOGISTIC;
    }
    if (strncmp(a, STRING_RELU, 5) == 0) {
        return RELU;
    }
    if (strncmp(a, STRING_GAUSSIAN, 9) == 0) {
        return GAUSSIAN;
    }
    if (strncmp(a, STRING_TANH, 5) == 0) {
        return TANH;
    }
    if (strncmp(a, STRING_SIN, 4) == 0) {
        return SIN;
    }
    if (strncmp(a, STRING_COS, 4) == 0) {
        return COS;
    }
    if (strncmp(a, STRING_SOFT_PLUS, 10) == 0) {
        return SOFT_PLUS;
    }
    if (strncmp(a, STRING_LINEAR, 7) == 0) {
        return LINEAR;
    }
    if (strncmp(a, STRING_LEAKY, 6) == 0) {
        return LEAKY;
    }
    if (strncmp(a, STRING_SELU, 5) == 0) {
        return SELU;
    }
    if (strncmp(a, STRING_LOGGY, 6) == 0) {
        return LOGGY;
    }
    if (strncmp(a, STRING_SOFT_MAX, 9) == 0) {
        return SOFT_MAX;
    }
    printf("neural_activation_as_int(): invalid activation: %s\n", a);
    exit(EXIT_FAILURE);
}
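
/*
 * Illustrative sketch (not part of the original file): the two conversion
 * functions above round-trip, which allows an activation to be serialised by
 * name and recovered as an integer. Guarded by the hypothetical
 * NEURAL_ACTIVATIONS_EXAMPLE macro.
 */
#ifdef NEURAL_ACTIVATIONS_EXAMPLE
#include <assert.h>

static void
example_round_trip(void)
{
    const char *name = neural_activation_string(RELU); /* STRING_RELU */
    assert(neural_activation_as_int(name) == RELU);
}
#endif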

/**
 * @brief Applies an activation function to a vector of neuron states.
 */
void
neural_activate_array(double *state, double *output, const int n, const int a)
{
    for (int i = 0; i < n; ++i) {
        state[i] = clamp(state[i], NEURON_MIN, NEURON_MAX);
        output[i] = neural_activate(a, state[i]);
    }
}

/**
 * @brief Applies a gradient function to a vector of neuron states.
 */
void
neural_gradient_array(const double *state, double *delta, const int n,
                      const int a)
{
    for (int i = 0; i < n; ++i) {
        delta[i] *= neural_gradient(a, state[i]);
    }
}
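
/*
 * Illustrative sketch (not part of the original file): a forward and backward
 * pass over a small vector of neuron states. Note that neural_activate_array()
 * writes the clamped states back into state, so the gradient below is taken at
 * the same clamped values. Guarded by the hypothetical
 * NEURAL_ACTIVATIONS_EXAMPLE macro.
 */
#ifdef NEURAL_ACTIVATIONS_EXAMPLE
static void
example_arrays(void)
{
    double state[3] = { -1, 0, 1 }; /* raw neuron states */
    double output[3] = { 0 }; /* activated outputs */
    double delta[3] = { 1, 1, 1 }; /* incoming error terms */
    neural_activate_array(state, output, 3, TANH); /* forward pass */
    neural_gradient_array(state, delta, 3, TANH); /* delta[i] *= tanh'(state[i]) */
}
#endif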
Referenced by this file:

neural_activations.h: Neural network activation functions.

    Activation identifiers and their output ranges:

        LOGISTIC     Logistic [0,1]
        LOGGY        Logistic [-1,1]
        RELU         Rectified linear unit [0,inf]
        GAUSSIAN     Gaussian (0,1]
        TANH         Tanh [-1,1]
        SIN          Sine [-1,1]
        COS          Cos [-1,1]
        SOFT_PLUS    Soft plus [0,inf]
        LINEAR       Linear [-inf,inf]
        LEAKY        Leaky rectified linear unit [-inf,inf]
        SELU         Scaled-exponential linear unit [-1.7581,inf]
        SOFT_MAX     Softmax

    Each identifier has a matching STRING_* name macro (STRING_LOGISTIC,
    STRING_RELU, etc.), and each except SOFT_MAX has a static activate/gradient
    pair (e.g. logistic_activate() and logistic_gradient()) used by the switch
    statements above.

neural_layer.h: Interface for neural network layers. Provides NEURON_MIN and
NEURON_MAX, the minimum and maximum neuron states.

utils.h: Utility functions for random number handling, etc. Provides clamp()
(utils.h:60), which returns a value clamped within the specified range.