XCSF 1.4.7
XCSF learning classifier system
neural.c
/*
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
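/**
 * @file neural.c
 * @brief An implementation of a multi-layer perceptron neural network.
 * @details A network is stored as a doubly linked list of layers, where the
 * tail is the first (input) layer and the head is the output layer.
 */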
#include "neural.h"
#include "neural_layer_connected.h"
#include "neural_layer_dropout.h"
#include "neural_layer_noise.h"
#include "neural_layer_recurrent.h"
#include "neural_layer_softmax.h"
#include "utils.h"
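/**
 * @brief Initialises an empty neural network.
 * @param [in] net The neural network to initialise.
 */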
void
neural_init(struct Net *net)
{
    net->head = NULL;
    net->tail = NULL;
    net->n_layers = 0;
    net->n_inputs = 0;
    net->n_outputs = 0;
    net->output = NULL;
    net->train = false;
}
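/**
 * @brief Initialises and creates a new neural network from a parameter list.
 * @param [in] net The neural network to initialise.
 * @param [in] arg List of layer parameters defining the network.
 */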
void
neural_create(struct Net *net, struct ArgsLayer *arg)
{
    neural_init(net);
    const struct Layer *prev_layer = NULL;
    while (arg != NULL) {
        if (prev_layer != NULL) {
            arg->height = prev_layer->out_h; // pass through n inputs
            arg->width = prev_layer->out_w;
            arg->channels = prev_layer->out_c;
            arg->n_inputs = prev_layer->n_outputs;
            switch (arg->type) {
                case AVGPOOL:
                case MAXPOOL:
                case DROPOUT:
                case UPSAMPLE:
                case SOFTMAX:
                case NOISE:
                    arg->n_init = prev_layer->n_outputs;
                    break;
                default:
                    break;
            }
        }
        struct Layer *l = layer_init(arg);
        neural_push(net, l);
        prev_layer = l;
        arg = arg->next;
    }
    if (net->n_layers < 1 || net->n_outputs < 1 || net->n_inputs < 1) {
        printf("neural_create() error: initialising network\n");
        exit(EXIT_FAILURE);
    }
}
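/**
 * @brief Inserts a layer into a neural network.
 * @param [in] net The neural network receiving the layer.
 * @param [in] l The layer to insert.
 * @param [in] pos The position in the network to insert the layer.
 */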
void
neural_insert(struct Net *net, struct Layer *l, const int pos)
{
    if (net->head == NULL || net->tail == NULL) { // empty list
        net->head = malloc(sizeof(struct Llist));
        net->head->layer = l;
        net->head->prev = NULL;
        net->head->next = NULL;
        net->tail = net->head;
        net->n_inputs = l->n_inputs;
        net->n_outputs = l->n_outputs;
        net->output = l->output;
    } else { // insert
        struct Llist *iter = net->tail;
        for (int i = 0; i < pos && iter != NULL; ++i) {
            iter = iter->prev;
        }
        struct Llist *new = malloc(sizeof(struct Llist));
        new->layer = l;
        new->prev = iter;
        if (iter == NULL) { // new head
            new->next = net->head;
            net->head->prev = new;
            net->head = new;
            net->n_outputs = l->n_outputs;
            net->output = l->output;
        } else {
            new->next = iter->next;
            iter->next = new;
            if (new->next == NULL) { // new tail
                net->tail = new;
                net->n_inputs = l->n_inputs;
            } else { // middle
                new->next->prev = new;
            }
        }
    }
    ++(net->n_layers);
}
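/**
 * @brief Removes a layer from a neural network.
 * @param [in] net The neural network removing the layer.
 * @param [in] pos The position of the layer to remove.
 */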
void
neural_remove(struct Net *net, const int pos)
{
    // find the layer
    struct Llist *iter = net->tail;
    for (int i = 0; i < pos && iter != NULL; ++i) {
        iter = iter->prev;
    }
    if (iter == NULL) {
        printf("neural_remove(): error finding layer to remove\n");
        exit(EXIT_FAILURE);
    } else if (iter->next == NULL && iter->prev == NULL) {
        printf("neural_remove(): attempted to remove the only layer\n");
        exit(EXIT_FAILURE);
    }
    // head
    if (iter->prev == NULL) {
        net->head = iter->next;
        if (iter->next != NULL) {
            iter->next->prev = NULL;
        }
        net->output = net->head->layer->output;
        net->n_outputs = net->head->layer->n_outputs;
    }
    // tail
    if (iter->next == NULL) {
        net->tail = iter->prev;
        if (iter->prev != NULL) {
            iter->prev->next = NULL;
        }
    }
    // middle
    if (iter->prev != NULL && iter->next != NULL) {
        iter->next->prev = iter->prev;
        iter->prev->next = iter->next;
    }
    --(net->n_layers);
    layer_free(iter->layer);
    free(iter->layer);
    free(iter);
}
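/**
 * @brief Inserts a layer at the head of a neural network.
 * @param [in] net The neural network receiving the layer.
 * @param [in] l The layer to insert.
 */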
void
neural_push(struct Net *net, struct Layer *l)
{
    neural_insert(net, l, net->n_layers);
}
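/**
 * @brief Removes the layer at the head of a neural network.
 * @param [in] net The neural network removing the layer.
 */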
void
neural_pop(struct Net *net)
{
    neural_remove(net, net->n_layers - 1);
}
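/**
 * @brief Copies a neural network.
 * @param [in] dest The destination neural network.
 * @param [in] src The source neural network.
 */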
void
neural_copy(struct Net *dest, const struct Net *src)
{
    neural_init(dest);
    const struct Llist *iter = src->tail;
    while (iter != NULL) {
        struct Layer *l = layer_copy(iter->layer);
        neural_push(dest, l);
        iter = iter->prev;
    }
}
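/**
 * @brief Frees a neural network.
 * @param [in] net The neural network to free.
 */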
void
neural_free(struct Net *net)
{
    struct Llist *iter = net->tail;
    while (iter != NULL) {
        layer_free(iter->layer);
        free(iter->layer);
        net->tail = iter->prev;
        free(iter);
        iter = net->tail;
        --(net->n_layers);
    }
}
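/**
 * @brief Randomises the layers within a neural network.
 * @param [in] net The neural network to randomise.
 */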
void
neural_rand(const struct Net *net)
{
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        layer_rand(iter->layer);
        iter = iter->prev;
    }
}
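/**
 * @brief Mutates a neural network.
 * @param [in] net The neural network to mutate.
 * @return Whether any alterations were made.
 */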
bool
neural_mutate(const struct Net *net)
{
    bool mod = false;
    bool do_resize = false;
    const struct Layer *prev = NULL;
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        const int orig_outputs = iter->layer->n_outputs;
        // if the previous layer has grown or shrunk this layer must be resized
        if (do_resize) {
            layer_resize(iter->layer, prev);
            do_resize = false;
        }
        // mutate this layer
        if (layer_mutate(iter->layer)) {
            mod = true;
        }
        // check if this layer changed size
        if (iter->layer->n_outputs != orig_outputs) {
            do_resize = true;
        }
        // move to next layer
        prev = iter->layer;
        iter = iter->prev;
    }
    return mod;
}
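/**
 * @brief Resizes neural network layers as necessary.
 * @param [in] net The neural network to resize.
 */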
void
neural_resize(const struct Net *net)
{
    const struct Layer *prev = NULL;
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        if (prev != NULL && iter->layer->n_inputs != prev->n_outputs) {
            layer_resize(iter->layer, prev);
        }
        prev = iter->layer;
        iter = iter->prev;
    }
}
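/**
 * @brief Forward propagates a neural network.
 * @param [in] net The neural network to propagate.
 * @param [in] input The input state to propagate.
 * @param [in] train Whether the network is in training mode.
 */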
void
neural_propagate(struct Net *net, const double *input, const bool train)
{
    net->train = train;
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        layer_forward(iter->layer, net, input);
        input = layer_output(iter->layer);
        iter = iter->prev;
    }
}
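/**
 * @brief Performs a gradient descent update on a neural network.
 * @param [in] net The neural network to update.
 * @param [in] truth The desired network output.
 * @param [in] input The input state.
 */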
void
neural_learn(const struct Net *net, const double *truth, const double *input)
{
    // reset deltas
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        memset(iter->layer->delta, 0, sizeof(double) * iter->layer->n_outputs);
        iter = iter->prev;
    }
    // calculate output layer delta
    const struct Layer *p = net->head->layer;
    for (int i = 0; i < p->n_outputs; ++i) {
        p->delta[i] = truth[i] - p->output[i];
    }
    // backward phase
    iter = net->head;
    while (iter != NULL) {
        const struct Layer *l = iter->layer;
        if (iter->next == NULL) {
            layer_backward(l, net, input, NULL);
        } else {
            const struct Layer *prev = iter->next->layer;
            layer_backward(l, net, prev->output, prev->delta);
        }
        iter = iter->next;
    }
    // update phase
    iter = net->tail;
    while (iter != NULL) {
        layer_update(iter->layer);
        iter = iter->prev;
    }
}
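/**
 * @brief Returns the output of a specified neuron in the output layer of a
 * neural network.
 * @param [in] net The neural network to output.
 * @param [in] IDX Which neuron in the output layer to return.
 * @return The output of the specified neuron.
 */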
double
neural_output(const struct Net *net, const int IDX)
{
    if (IDX < 0 || IDX >= net->n_outputs) {
        printf("neural_output(): error (%d) >= (%d)\n", IDX, net->n_outputs);
        exit(EXIT_FAILURE);
    }
    return layer_output(net->head->layer)[IDX];
}
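/**
 * @brief Returns the outputs from the output layer of a neural network.
 * @param [in] net The neural network to output.
 * @return The neural network outputs.
 */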
double *
neural_outputs(const struct Net *net)
{
    return layer_output(net->head->layer);
}
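/**
 * @brief Prints a neural network.
 * @param [in] net The neural network to print.
 * @param [in] print_weights Whether to print the weights in each layer.
 */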
void
neural_print(const struct Net *net, const bool print_weights)
{
    char *json_str = neural_json_export(net, print_weights);
    printf("%s\n", json_str);
    free(json_str);
}
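/**
 * @brief Returns a json formatted string representation of a neural network.
 * @param [in] net The neural network to return.
 * @param [in] return_weights Whether to return the weights in each layer.
 * @return String encoded in json format.
 */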
char *
neural_json_export(const struct Net *net, const bool return_weights)
{
    cJSON *json = cJSON_CreateObject();
    const struct Llist *iter = net->tail;
    int i = 0;
    char layer_name[256];
    while (iter != NULL) {
        char *str = layer_json_export(iter->layer, return_weights);
        cJSON *layer = cJSON_Parse(str);
        free(str);
        snprintf(layer_name, 256, "layer_%d", i);
        cJSON_AddItemToObject(json, layer_name, layer);
        iter = iter->prev;
        ++i;
    }
    char *string = cJSON_Print(json);
    cJSON_Delete(json);
    return string;
}
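/**
 * @brief Creates a neural network from a cJSON object.
 * @param [in] net The neural network to initialise.
 * @param [in] arg List of layer parameters.
 * @param [in] json cJSON object.
 */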
void
neural_json_import(struct Net *net, const struct ArgsLayer *arg,
                   const cJSON *json)
{
    (void) net;
    (void) arg;
    (void) json;
    printf("Import error: neural networks not yet implemented\n");
    exit(EXIT_FAILURE);
}
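/**
 * @brief Returns the total number of non-zero weights in a neural network.
 * @param [in] net A neural network.
 * @return The calculated network size.
 */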
double
neural_size(const struct Net *net)
{
    int size = 0;
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        const struct Layer *l = iter->layer;
        switch (l->type) {
            case CONNECTED:
            case RECURRENT:
            case LSTM:
            case CONVOLUTIONAL:
                size += l->n_active;
                break;
            default:
                break;
        }
        iter = iter->prev;
    }
    return size;
}
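/**
 * @brief Writes a neural network to a file.
 * @param [in] net The neural network to save.
 * @param [in] fp Pointer to the output file.
 * @return The total number of elements written.
 */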
size_t
neural_save(const struct Net *net, FILE *fp)
{
    size_t s = 0;
    s += fwrite(&net->n_layers, sizeof(int), 1, fp);
    s += fwrite(&net->n_inputs, sizeof(int), 1, fp);
    s += fwrite(&net->n_outputs, sizeof(int), 1, fp);
    const struct Llist *iter = net->tail;
    while (iter != NULL) {
        s += layer_save(iter->layer, fp);
        iter = iter->prev;
    }
    return s;
}
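/**
 * @brief Reads a neural network from a file.
 * @param [in] net The neural network to load.
 * @param [in] fp Pointer to the input file.
 * @return The total number of elements read.
 */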
size_t
neural_load(struct Net *net, FILE *fp)
{
    size_t s = 0;
    int nlayers = 0;
    int ninputs = 0;
    int noutputs = 0;
    s += fread(&nlayers, sizeof(int), 1, fp);
    s += fread(&ninputs, sizeof(int), 1, fp);
    s += fread(&noutputs, sizeof(int), 1, fp);
    neural_init(net);
    for (int i = 0; i < nlayers; ++i) {
        struct Layer *l = malloc(sizeof(struct Layer));
        s += layer_load(l, fp);
        neural_push(net, l);
    }
    return s;
}