printf("neural_activate(): invalid activation function: %d\n", a);
printf("neural_gradient(): invalid activation function: %d\n", a);
printf("neural_activation_string(): invalid activation: %d\n", a);
printf("neural_activation_as_int(): invalid activation: %s\n", a);
double neural_activate(const int a, const double x)
Returns the result of applying a specified activation function.
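A minimal sketch of how neural_activate() might dispatch on the activation identifier; the exact case list and the exit-on-error behaviour are assumptions, and only a few of the documented activations are shown:

#include <stdio.h>
#include <stdlib.h>

double
neural_activate(const int a, const double x)
{
    switch (a) {
        case LOGISTIC: /* integer identifiers defined in this header */
            return logistic_activate(x);
        case RELU:
            return relu_activate(x);
        case LINEAR:
            return linear_activate(x);
        default: /* error message taken from the source listing above */
            printf("neural_activate(): invalid activation function: %d\n", a);
            exit(EXIT_FAILURE);
    }
}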
const char * neural_activation_string(const int a)
Returns the name of a specified activation function.
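A similar dispatch presumably maps identifiers to the STRING_* macros documented below; a sketch covering a subset of cases:

const char *
neural_activation_string(const int a)
{
    switch (a) {
        case LOGISTIC:
            return STRING_LOGISTIC;
        case LINEAR:
            return STRING_LINEAR;
        case GAUSSIAN:
            return STRING_GAUSSIAN;
        default: /* error message taken from the source listing above */
            printf("neural_activation_string(): invalid activation: %d\n", a);
            exit(EXIT_FAILURE);
    }
}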
void neural_gradient_array(const double *state, double *delta, const int n, const int a)
Applies a gradient function to a vector of neuron states.
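A plausible implementation is an element-wise loop over the vector; whether the gradient is multiplied into delta (chain rule) or assigned outright is an assumption here:

void
neural_gradient_array(const double *state, double *delta, const int n, const int a)
{
    for (int i = 0; i < n; ++i) {
        delta[i] *= neural_gradient(a, state[i]); /* scale the incoming delta */
    }
}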
int neural_activation_as_int(const char *a)
Returns the integer representation of an activation function.
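Conversion from name to identifier presumably compares against the STRING_* macros; a sketch over a subset, assuming it exits on an unknown name (requires string.h, stdio.h, stdlib.h):

int
neural_activation_as_int(const char *a)
{
    if (strcmp(a, STRING_LOGISTIC) == 0) {
        return LOGISTIC;
    }
    if (strcmp(a, STRING_LINEAR) == 0) {
        return LINEAR;
    }
    if (strcmp(a, STRING_GAUSSIAN) == 0) {
        return GAUSSIAN;
    }
    printf("neural_activation_as_int(): invalid activation: %s\n", a);
    exit(EXIT_FAILURE);
}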
void neural_activate_array(double *state, double *output, const int n, const int a)
Applies an activation function to a vector of neuron states.
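A sketch of the array version; clamping the raw state to [NEURON_MIN, NEURON_MAX] before activating is an assumption, suggested by the clamp() utility and neuron-state bounds documented below:

void
neural_activate_array(double *state, double *output, const int n, const int a)
{
    for (int i = 0; i < n; ++i) {
        state[i] = clamp(state[i], NEURON_MIN, NEURON_MAX); /* assumed bounding step */
        output[i] = neural_activate(a, state[i]);
    }
}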
double neural_gradient(const int a, const double x)
Returns the derivative of a specified activation function.
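A usage sketch pairing the two dispatch functions; whether neural_gradient() expects the pre-activation input or the activated value (as the logistic gradient's x * (1 - x) form would suggest) is an assumption here, as is the header name:

#include <stdio.h>
#include "neural_activations.h" /* assumed header name */

int
main(void)
{
    const double x = 0.5;
    const double y = neural_activate(LOGISTIC, x); /* forward pass */
    const double d = neural_gradient(LOGISTIC, y); /* assumes gradient of the activated value */
    printf("y = %f, dy/dx = %f\n", y, d);
    return 0;
}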
Neural network activation functions.
#define STRING_GAUSSIAN
Gaussian.
#define STRING_LOGGY
Loggy.
#define STRING_LEAKY
Leaky.
static double cos_activate(const double x)
Cosine activation.
static double selu_gradient(const double x)
SELU gradient.
static double relu_activate(const double x)
ReLU activation.
static double soft_plus_gradient(const double x)
Soft plus gradient.
#define STRING_LINEAR
Linear.
#define LOGISTIC
Logistic [0,1].
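The standard logistic definitions, as a sketch; writing the gradient in terms of the already-activated value is an assumption consistent with the stated [0,1] range:

#include <math.h>

static double
logistic_activate(const double x)
{
    return 1 / (1 + exp(-x)); /* squashes to (0,1) */
}

static double
logistic_gradient(const double x)
{
    return x * (1 - x); /* assumes x is the activated value y, since dy/dx = y * (1 - y) */
}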
static double linear_gradient(const double x)
Linear gradient.
#define SELU
Scaled-exponential linear unit [-1.7581,inf].
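The stated lower bound matches the published SELU constants λ ≈ 1.0507 and α ≈ 1.6733, since -λα ≈ -1.7581; a sketch using those constants (an assumption about this codebase):

#include <math.h>

static double
selu_activate(const double x)
{
    /* λ * x for x >= 0; λ * α * (e^x - 1) otherwise, bounded below by -λα */
    return (x >= 0) ? 1.0507009 * x : 1.0507009 * 1.6732632 * (exp(x) - 1);
}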
static double leaky_gradient(const double x)
Leaky ReLU gradient.
static double gaussian_gradient(const double x)
Gaussian gradient.
static double gaussian_activate(const double x)
Gaussian activation.
#define LOGGY
Logistic [-1,1].
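Loggy appears to be the logistic function rescaled from [0,1] to [-1,1]; a sketch under that assumption:

#include <math.h>

static double
loggy_activate(const double x)
{
    return 2 / (1 + exp(-x)) - 1; /* 2 * logistic(x) - 1, range (-1,1) */
}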
#define RELU
Rectified linear unit [0,inf].
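ReLU and its derivative are one-liners; a sketch of the standard definitions:

static double
relu_activate(const double x)
{
    return (x > 0) ? x : 0; /* pass positives through, zero otherwise */
}

static double
relu_gradient(const double x)
{
    return (x > 0) ? 1 : 0;
}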
static double loggy_gradient(const double x)
Loggy gradient.
static double soft_plus_activate(const double x)
Soft plus activation.
static double loggy_activate(const double x)
Loggy activation.
static double cos_gradient(const double x)
Cosine gradient.
#define STRING_SOFT_MAX
Softmax.
static double linear_activate(const double x)
Linear activation.
static double sin_gradient(const double x)
Sine gradient.
#define LINEAR
Linear [-inf,inf].
static double logistic_activate(const double x)
Logistic activation.
static double leaky_activate(const double x)
Leaky ReLU activation.
static double logistic_gradient(const double x)
Logistic gradient.
#define LEAKY
Leaky rectified linear unit [-inf,inf].
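Leaky ReLU keeps a small slope for negative inputs, which is why its range is the whole real line; a sketch with an assumed leak coefficient of 0.1:

static double
leaky_activate(const double x)
{
    return (x > 0) ? x : 0.1 * x; /* 0.1 is an assumed leak coefficient */
}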
#define STRING_LOGISTIC
Logistic.
static double tanh_activate(const double x)
Tanh activation.
#define GAUSSIAN
Gaussian (0,1].
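The (0,1] range suggests the unnormalised Gaussian bump; a sketch under that assumption:

#include <math.h>

static double
gaussian_activate(const double x)
{
    return exp(-x * x); /* peaks at 1 when x = 0, tends to 0 as |x| grows */
}

static double
gaussian_gradient(const double x)
{
    return -2 * x * exp(-x * x);
}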
static double tanh_gradient(const double x)
Tanh gradient.
static double relu_gradient(const double x)
ReLU gradient.
static double sin_activate(const double x)
Sine activation.
#define SOFT_PLUS
Soft plus [0,inf].
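Soft plus is the smooth approximation to ReLU, with the logistic function as its derivative; a sketch:

#include <math.h>

static double
soft_plus_activate(const double x)
{
    return log1p(exp(x)); /* log(1 + e^x) */
}

static double
soft_plus_gradient(const double x)
{
    return 1 / (1 + exp(-x)); /* d/dx log(1 + e^x) = logistic(x) */
}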
static double selu_activate(const double x)
SELU activation.
#define STRING_SOFT_PLUS
Soft plus.
Interface for neural network layers.
#define NEURON_MIN
Minimum neuron state.
#define NEURON_MAX
Maximum neuron state.
Utility functions for random number handling, etc.
static double clamp(const double a, const double min, const double max)
Returns a double clamped within the specified range.
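A minimal sketch of clamp(), e.g. as might be used to keep neuron states within [NEURON_MIN, NEURON_MAX]:

static double
clamp(const double a, const double min, const double max)
{
    return (a < min) ? min : (a > max) ? max : a;
}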