/* Number of distinct activation functions supported. */
#define NUM_ACTIVATIONS (11)

/* Integer identifier reserved for the softmax activation. */
#define SOFT_MAX (100)

/*
 * Human-readable names for each activation function.
 * NOTE(review): the original literals carried an explicit trailing "\0";
 * string literals are already NUL-terminated by the compiler, so the
 * redundant terminator has been dropped. This shrinks sizeof(macro) by
 * one byte — confirm no caller depends on the old size (strcmp/printf
 * style usage is unaffected).
 */
#define STRING_LOGISTIC ("logistic")
#define STRING_RELU ("relu")
#define STRING_TANH ("tanh")
#define STRING_LINEAR ("linear")
#define STRING_GAUSSIAN ("gaussian")
#define STRING_SIN ("sin")
#define STRING_COS ("cos")
#define STRING_SOFT_PLUS ("softplus")
#define STRING_LEAKY ("leaky")
#define STRING_SELU ("selu")
#define STRING_LOGGY ("loggy")
#define STRING_SOFT_MAX ("softmax")
77 return 1. / (1. + exp(-x));
83 double fx = 1. / (1. + exp(-x));
90 return 2. / (1. + exp(-x)) - 1;
97 return (2 * fx) / ((fx + 1) * (fx + 1));
109 return -2 * x * exp(-x * x);
127 return (x >= 0) * 1.0507 * x + (x < 0) * 1.0507 * 1.6732 * expm1(x);
133 return (x >= 0) * 1.0507 + (x < 0) * (1.0507 * 1.6732 * exp(x));
152 return log1p(exp(x));
158 return 1. / (1. + exp(-x));
177 return (x > 0) ? x : .1 * x;
183 return (x < 0) ? .1 : 1;
double neural_activate(const int a, const double x)
Returns the result from applying a specified activation function.
static double cos_activate(const double x)
static double selu_gradient(const double x)
static double relu_activate(const double x)
static double soft_plus_gradient(const double x)
const char * neural_activation_string(const int a)
Returns the name of a specified activation function.
static double linear_gradient(const double x)
static double leaky_gradient(const double x)
static double gaussian_gradient(const double x)
static double gaussian_activate(const double x)
void neural_gradient_array(const double *state, double *delta, const int n, const int a)
Applies a gradient function to a vector of neuron states.
static double loggy_gradient(const double x)
static double soft_plus_activate(const double x)
static double loggy_activate(const double x)
static double cos_gradient(const double x)
int neural_activation_as_int(const char *a)
Returns the integer representation of an activation function.
static double linear_activate(const double x)
static double sin_gradient(const double x)
static double logistic_activate(const double x)
static double leaky_activate(const double x)
static double logistic_gradient(const double x)
void neural_activate_array(double *state, double *output, const int n, const int a)
Applies an activation function to a vector of neuron states.
static double tanh_activate(const double x)
static double tanh_gradient(const double x)
static double relu_gradient(const double x)
static double sin_activate(const double x)
static double selu_activate(const double x)
double neural_gradient(const int a, const double x)
Returns the derivative from applying a specified activation function.