Excerpts from neural_layer_recurrent.c:

/* malloc_layer_arrays(): allocate the self-adaptive mutation rates */
l->mu = malloc(sizeof(double) * N_MU);

/* neural_layer_recurrent_copy(): guard the source type, allocate, copy */
printf("neural_layer_recurrent_copy(): incorrect source layer type\n");
struct Layer *l = malloc(sizeof(struct Layer));
memcpy(l->mu, src->mu, sizeof(double) * N_MU);

/* neural_layer_recurrent_backward(): tail of the signature */
const double *input, double *delta)

/* neural_layer_recurrent_print(): print the exported JSON string */
printf("%s\n", json_str);
/* neural_layer_recurrent_json_export() */
char *
neural_layer_recurrent_json_export(const struct Layer *l,
                                   const bool return_weights)
{
    cJSON *json = cJSON_CreateObject();
    cJSON_AddStringToObject(json, "type", "recurrent");
    cJSON_AddStringToObject(json, "activation",
                            neural_activation_string(l->function));
    cJSON_AddNumberToObject(json, "n_inputs", l->n_inputs);
    cJSON_AddNumberToObject(json, "n_outputs", l->n_outputs);
    cJSON_AddNumberToObject(json, "eta", l->eta);
    cJSON *mutation = cJSON_CreateDoubleArray(l->mu, N_MU);
    cJSON_AddItemToObject(json, "mutation", mutation);
    /* ... weights_str is built per sub-layer via layer_weight_json() ... */
    cJSON *il = cJSON_Parse(weights_str);
    cJSON_AddItemToObject(json, "input_layer", il);
    /* ... */
    cJSON *sl = cJSON_Parse(weights_str);
    cJSON_AddItemToObject(json, "self_layer", sl);
    /* ... */
    cJSON *ol = cJSON_Parse(weights_str);
    cJSON_AddItemToObject(json, "output_layer", ol);
    char *string = cJSON_Print(json);
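The exported string is allocated by cJSON, so the caller is responsible for freeing it. A minimal usage sketch (variable names are illustrative):

/* Export a recurrent layer as JSON and print it; the caller frees. */
char *json_str = neural_layer_recurrent_json_export(l, true);
printf("%s\n", json_str);
free(json_str);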
/* neural_layer_recurrent_save(): write the layer to a file */
s += fwrite(&l->n_inputs, sizeof(int), 1, fp);
s += fwrite(&l->n_outputs, sizeof(int), 1, fp);
/* ... */
s += fwrite(&l->options, sizeof(uint32_t), 1, fp);
s += fwrite(&l->function, sizeof(int), 1, fp);
s += fwrite(&l->eta, sizeof(double), 1, fp);
s += fwrite(&l->n_active, sizeof(int), 1, fp);
s += fwrite(l->mu, sizeof(double), N_MU, fp);
/* neural_layer_recurrent_load(): read the layer from a file */
s += fread(&l->n_inputs, sizeof(int), 1, fp);
s += fread(&l->n_outputs, sizeof(int), 1, fp);
/* ... */
s += fread(&l->options, sizeof(uint32_t), 1, fp);
s += fread(&l->function, sizeof(int), 1, fp);
s += fread(&l->eta, sizeof(double), 1, fp);
s += fread(&l->n_active, sizeof(int), 1, fp);
/* ... */
s += fread(l->mu, sizeof(double), N_MU, fp);
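Because the fields are written and read in the same order, save and load form a symmetric pair. A hedged round-trip sketch (file name and error handling are illustrative):

FILE *fp = fopen("recurrent_layer.bin", "wb");
size_t s = neural_layer_recurrent_save(l, fp); /* elements written */
fclose(fp);
fp = fopen("recurrent_layer.bin", "rb");
s = neural_layer_recurrent_load(l, fp);        /* elements read */
fclose(fp);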
void blas_axpy(const int N, const double ALPHA, const double *X, const int INCX, double *Y, const int INCY)
Multiplies vector X by the scalar ALPHA and adds it to the vector Y.
Basic linear algebra functions.
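The operation is the standard BLAS axpy, Y = ALPHA * X + Y over N elements with strides INCX and INCY. A minimal scalar sketch of the described behaviour, not necessarily the library's implementation:

/* Reference semantics: Y[i*INCY] += ALPHA * X[i*INCX] for i in [0, N). */
static void axpy_reference(const int N, const double ALPHA, const double *X,
                           const int INCX, double *Y, const int INCY)
{
    for (int i = 0; i < N; ++i) {
        Y[i * INCY] += ALPHA * X[i * INCX];
    }
}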
const char * neural_activation_string(const int a)
Returns the name of a specified activation function.
Neural network activation functions.
#define LINEAR
Linear [-inf,inf].
bool layer_mutate_connectivity(struct Layer *l, const double mu_enable, const double mu_disable)
Mutates a layer's connectivity by zeroing weights.
void layer_defaults(struct Layer *l)
Initialises a layer to default values.
int layer_mutate_neurons(const struct Layer *l, const double mu)
Returns the number of neurons to add or remove from a layer.
bool layer_mutate_functions(struct Layer *l, const double mu)
Mutates a layer's activation function by random selection.
char * layer_weight_json(const struct Layer *l, const bool return_weights)
Returns a json formatted string representation of a layer's weights.
void layer_guard_outputs(const struct Layer *l)
Check number of outputs is within bounds.
void layer_add_neurons(struct Layer *l, const int N)
Adds N neurons to a layer. Negative N removes neurons.
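layer_mutate_neurons() and layer_add_neurons() are naturally paired: the first samples how many neurons to add or remove, the second applies the change. A hedged sketch of that pairing (the mutation-rate index is an illustrative assumption):

const int n = layer_mutate_neurons(l, l->mu[1]); /* mu[1] is an assumed index */
if (n != 0) {
    layer_add_neurons(l, n); /* negative n removes neurons */
}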
bool layer_mutate_eta(struct Layer *l, const double mu)
Mutates the gradient descent rate of a neural layer.
bool layer_mutate_weights(struct Layer *l, const double mu)
Mutates a layer's weights and biases by adding random numbers from a Gaussian normal distribution with zero mean and standard deviation equal to the mutation rate.
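The description implies zero-mean Gaussian perturbation scaled by the mutation rate. A self-contained sketch of that idea; the gauss() helper and the raw weights array are illustrative assumptions:

#include <math.h>
#include <stdlib.h>

/* Hypothetical helper: one Box-Muller sample from N(0, sigma). */
static double gauss(const double sigma)
{
    const double u1 = (rand() + 1.0) / ((double) RAND_MAX + 2.0);
    const double u2 = (rand() + 1.0) / ((double) RAND_MAX + 2.0);
    return sigma * sqrt(-2.0 * log(u1)) * cos(2.0 * 3.141592653589793 * u2);
}

/* Sketch of the mutation described above: jitter every weight. */
static void mutate_weights_sketch(double *weights, const int n_weights,
                                  const double mu)
{
    for (int i = 0; i < n_weights; ++i) {
        weights[i] += gauss(mu); /* zero-mean noise, std. dev. = mu */
    }
}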
static void layer_rand(struct Layer *l)
Randomises a layer.
static void layer_resize(struct Layer *l, const struct Layer *prev)
Resizes a layer using the previous layer's inputs.
#define LAYER_EVOLVE_ETA
Layer may evolve rate of gradient descent.
#define LAYER_EVOLVE_FUNCTIONS
Layer may evolve functions.
static size_t layer_save(const struct Layer *l, FILE *fp)
Writes the layer to a file.
static struct Layer * layer_init(const struct ArgsLayer *args)
Creates and initialises a new layer.
static struct Layer * layer_copy(const struct Layer *src)
Creates and returns a copy of a specified layer.
#define LAYER_EVOLVE_WEIGHTS
Layer may evolve weights.
static void layer_free(const struct Layer *l)
Frees the memory used by the layer.
static void layer_backward(const struct Layer *l, const struct Net *net, const double *input, double *delta)
Backward propagates the error through a layer.
static size_t layer_load(struct Layer *l, FILE *fp)
Reads the layer from a file.
#define LAYER_EVOLVE_NEURONS
Layer may evolve neurons.
#define LAYER_EVOLVE_CONNECT
Layer may evolve connectivity.
static void layer_update(const struct Layer *l)
Updates the weights and biases of a layer.
#define RECURRENT
Layer type recurrent.
static void layer_forward(const struct Layer *l, const struct Net *net, const double *input)
Forward propagates an input through the layer.
#define LAYER_SGD_WEIGHTS
Layer may perform gradient descent.
#define CONNECTED
Layer type connected.
uint32_t layer_args_opt(const struct ArgsLayer *args)
Returns a bitstring representing the permissions granted by a layer.
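The returned bitstring is presumably stored in the layer's options field and tested with bitwise AND against the LAYER_* flags. A hedged sketch (the assignment point is an assumption):

#include <stdint.h>

/* Sketch: derive and test layer permissions from the layer arguments. */
static void init_options_sketch(struct Layer *l, const struct ArgsLayer *args)
{
    l->options = layer_args_opt(args); /* assumed assignment point */
    if (l->options & LAYER_EVOLVE_WEIGHTS) {
        /* weight mutation is permitted for this layer */
    }
    if (l->options & LAYER_SGD_WEIGHTS) {
        /* gradient descent updates are permitted */
    }
}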
struct ArgsLayer * layer_args_copy(const struct ArgsLayer *src)
Creates and returns a copy of specified layer parameters.
An implementation of a fully-connected layer of perceptrons.
size_t neural_layer_recurrent_save(const struct Layer *l, FILE *fp)
Writes a recurrent layer to a file.
size_t neural_layer_recurrent_load(struct Layer *l, FILE *fp)
Reads a recurrent layer from a file.
void neural_layer_recurrent_update(const struct Layer *l)
Updates the weights and biases of a recurrent layer.
void neural_layer_recurrent_resize(struct Layer *l, const struct Layer *prev)
Resizes a recurrent layer if the previous layer has changed size.
static bool mutate_eta(struct Layer *l)
Mutates the gradient descent rate used to update a recurrent layer.
struct Layer * neural_layer_recurrent_copy(const struct Layer *src)
Initialises and creates a copy of one recurrent layer from another.
bool neural_layer_recurrent_mutate(struct Layer *l)
Mutates a recurrent layer.
void neural_layer_recurrent_rand(struct Layer *l)
Randomises a recurrent layer's weights.
static void free_layer_arrays(const struct Layer *l)
Free memory used by a recurrent layer.
static void malloc_layer_arrays(struct Layer *l)
Allocate memory used by a recurrent layer.
void neural_layer_recurrent_init(struct Layer *l, const struct ArgsLayer *args)
Initialises a recurrent layer.
static void set_layer_n_weights(struct Layer *l)
Sets the total number of weights in a recurrent layer.
static void realloc_layer_arrays(struct Layer *l)
Resize memory used by a recurrent layer.
#define N_MU
Number of mutation rates applied to a recurrent layer.
char * neural_layer_recurrent_json_export(const struct Layer *l, const bool return_weights)
Returns a json formatted string representation of a recurrent layer.
static bool mutate_neurons(struct Layer *l)
Mutates the number of neurons in a recurrent layer.
double * neural_layer_recurrent_output(const struct Layer *l)
Returns the output from a recurrent layer.
static bool mutate_functions(struct Layer *l)
Mutates the activation function of a recurrent layer.
static void set_layer_n_active(struct Layer *l)
Sets the number of active (non-zero) weights in a recurrent layer.
static const int MU_TYPE[N_MU]
Self-adaptation method for mutating a recurrent layer.
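A plausible shape for this array, assuming all six rates self-adapt by rate selection (the actual initialiser may differ):

static const int MU_TYPE[N_MU] = {
    SAM_RATE_SELECT, SAM_RATE_SELECT, SAM_RATE_SELECT,
    SAM_RATE_SELECT, SAM_RATE_SELECT, SAM_RATE_SELECT
};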
static bool mutate_weights(struct Layer *l)
Mutates the magnitude of weights and biases in a recurrent layer.
static void set_layer_n_biases(struct Layer *l)
Sets the total number of biases in a recurrent layer.
static void malloc_layers(struct Layer *l)
Allocate memory for the sub-layers.
void neural_layer_recurrent_free(const struct Layer *l)
Free memory used by a recurrent layer.
void neural_layer_recurrent_forward(const struct Layer *l, const struct Net *net, const double *input)
Forward propagates a recurrent layer.
static bool mutate_connectivity(struct Layer *l)
Mutates the number of active weights in a recurrent layer.
void neural_layer_recurrent_backward(const struct Layer *l, const struct Net *net, const double *input, double *delta)
Backward propagates a recurrent layer.
void neural_layer_recurrent_print(const struct Layer *l, const bool print_weights)
Prints a recurrent layer.
An implementation of a recurrent layer of perceptrons.
void sam_init(double *mu, const int N, const int *type)
Initialises self-adaptive mutation rates.
void sam_adapt(double *mu, const int N, const int *type)
Self-adapts mutation rates.
Self-adaptive mutation functions.
#define SAM_RATE_SELECT
Ten normally distributed rates.
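Given the signatures above, a minimal usage sketch for a recurrent layer's six mutation rates:

double mu[N_MU];
sam_init(mu, N_MU, MU_TYPE);  /* set starting rates per MU_TYPE */
sam_adapt(mu, N_MU, MU_TYPE); /* self-adapt the rates before mutating */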
Parameters for initialising a neural network layer.
int n_init
Initial number of units / neurons / filters.
int function
Activation function.
int n_max
Maximum number of units / neurons.
int n_inputs
Number of inputs.
int type
Layer type: CONNECTED, DROPOUT, etc.
Neural network layer data structure.
double * output
Current neuron outputs (after activation function).
struct Layer * input_layer
Input sub-layer for recurrent layers.
double * state
Current neuron states (before activation function).
int n_inputs
Number of layer inputs.
int n_biases
Number of layer biases.
double * mu
Mutation rates.
int function
Layer activation function.
struct LayerVtbl const * layer_vptr
Functions acting on layers.
int max_outputs
Maximum number of neurons in the layer.
int n_weights
Number of layer weights.
struct Layer * output_layer
Output sub-layer for recurrent layers.
int n_outputs
Number of layer outputs.
struct Layer * self_layer
Self (hidden-state) sub-layer for recurrent layers.
int n_active
Number of active weights / connections.
double * prev_state
Previous state for recurrent layers.
int out_w
Output width (pool, conv, and upsample layers).
int type
Layer type: CONNECTED, DROPOUT, etc.
int out_c
Output channels (pool, conv, and upsample layers).
double * delta
Delta for updating weights.
uint32_t options
Bitwise layer options permitting evolution, SGD, etc.
int out_h
Output height (pool, conv, and upsample layers).
double eta
Gradient descent rate.
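Taken together, input_layer, self_layer, output_layer, and prev_state suggest a recurrent step computed from three sub-layers. A speculative sketch of such a forward pass using the generic layer_forward() dispatcher and blas_axpy(); the real neural_layer_recurrent_forward() may differ in detail:

#include <string.h> /* memcpy */

static void recurrent_forward_sketch(const struct Layer *l,
                                     const struct Net *net,
                                     const double *input)
{
    layer_forward(l->input_layer, net, input);        /* input contribution */
    layer_forward(l->self_layer, net, l->prev_state); /* recurrent contribution */
    /* state(t) = input contribution + self contribution */
    memcpy(l->state, l->input_layer->output, sizeof(double) * l->n_outputs);
    blas_axpy(l->n_outputs, 1, l->self_layer->output, 1, l->state, 1);
    layer_forward(l->output_layer, net, l->state);    /* apply activation */
    /* remember the hidden state for the next time step */
    memcpy(l->prev_state, l->output_layer->output,
           sizeof(double) * l->n_outputs);
}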
Neural network data structure.
Utility functions for random number handling, etc.