XCSF 1.4.8
XCSF learning classifier system
Loading...
Searching...
No Matches
neural_layer_args.c
Go to the documentation of this file.
1/*
2 * This program is free software: you can redistribute it and/or modify
3 * it under the terms of the GNU General Public License as published by
4 * the Free Software Foundation, either version 3 of the License, or
5 * (at your option) any later version.
6 *
7 * This program is distributed in the hope that it will be useful,
8 * but WITHOUT ANY WARRANTY; without even the implied warranty of
9 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 * GNU General Public License for more details.
11 *
12 * You should have received a copy of the GNU General Public License
13 * along with this program. If not, see <http://www.gnu.org/licenses/>.
14 */
15
24#include "neural_activations.h"
29#include "neural_layer_lstm.h"
31#include "neural_layer_noise.h"
35#include "utils.h"
36
41void
43{
44 args->type = CONNECTED;
45 args->n_inputs = 0;
46 args->n_init = 0;
47 args->n_max = 0;
48 args->max_neuron_grow = 0;
49 args->function = LOGISTIC;
51 args->height = 0;
52 args->width = 0;
53 args->channels = 0;
54 args->size = 0;
55 args->stride = 0;
56 args->pad = 0;
57 args->eta = 0;
58 args->eta_min = 0;
59 args->momentum = 0;
60 args->decay = 0;
61 args->probability = 0;
62 args->scale = 0;
63 args->evolve_weights = false;
64 args->evolve_neurons = false;
65 args->evolve_functions = false;
66 args->evolve_eta = false;
67 args->evolve_connect = false;
68 args->sgd_weights = false;
69 args->next = NULL;
70}
71
76struct ArgsLayer *
77layer_args_copy(const struct ArgsLayer *src)
78{
79 struct ArgsLayer *new = malloc(sizeof(struct ArgsLayer));
80 new->type = src->type;
81 new->n_inputs = src->n_inputs;
82 new->n_init = src->n_init;
83 new->n_max = src->n_max;
84 new->max_neuron_grow = src->max_neuron_grow;
85 new->function = src->function;
86 new->recurrent_function = src->recurrent_function;
87 new->height = src->height;
88 new->width = src->width;
89 new->channels = src->channels;
90 new->size = src->size;
91 new->stride = src->stride;
92 new->pad = src->pad;
93 new->eta = src->eta;
94 new->eta_min = src->eta_min;
95 new->momentum = src->momentum;
96 new->decay = src->decay;
97 new->probability = src->probability;
98 new->scale = src->scale;
99 new->evolve_weights = src->evolve_weights;
100 new->evolve_neurons = src->evolve_neurons;
101 new->evolve_functions = src->evolve_functions;
102 new->evolve_eta = src->evolve_eta;
103 new->evolve_connect = src->evolve_connect;
104 new->sgd_weights = src->sgd_weights;
105 new->next = NULL;
106 return new;
107}
108
114static void
115layer_args_json_export_inputs(cJSON *json, const struct ArgsLayer *args)
116{
117 if (layer_receives_images(args->type)) {
118 if (args->height > 0) {
119 cJSON_AddNumberToObject(json, "height", args->height);
120 }
121 if (args->width > 0) {
122 cJSON_AddNumberToObject(json, "width", args->width);
123 }
124 if (args->channels > 0) {
125 cJSON_AddNumberToObject(json, "channels", args->channels);
126 }
127 if (args->size > 0) {
128 cJSON_AddNumberToObject(json, "size", args->size);
129 }
130 if (args->stride > 0) {
131 cJSON_AddNumberToObject(json, "stride", args->stride);
132 }
133 if (args->pad > 0) {
134 cJSON_AddNumberToObject(json, "pad", args->pad);
135 }
136 } else {
137 cJSON_AddNumberToObject(json, "n_inputs", args->n_inputs);
138 }
139}
140
147static bool
148layer_args_json_import_inputs(struct ArgsLayer *args, const cJSON *json)
149{
150 if (strncmp(json->string, "height\0", 7) == 0 && cJSON_IsNumber(json)) {
151 args->height = json->valueint;
152 } else if (strncmp(json->string, "width\0", 6) == 0 &&
153 cJSON_IsNumber(json)) {
154 args->width = json->valueint;
155 } else if (strncmp(json->string, "channels\0", 9) == 0 &&
156 cJSON_IsNumber(json)) {
157 args->channels = json->valueint;
158 } else if (strncmp(json->string, "size\0", 5) == 0 &&
159 cJSON_IsNumber(json)) {
160 args->size = json->valueint;
161 } else if (strncmp(json->string, "stride\0", 7) == 0 &&
162 cJSON_IsNumber(json)) {
163 args->stride = json->valueint;
164 } else if (strncmp(json->string, "pad\0", 4) == 0 && cJSON_IsNumber(json)) {
165 args->pad = json->valueint;
166 } else if (strncmp(json->string, "n_inputs\0", 9) == 0 &&
167 cJSON_IsNumber(json)) {
168 (void) args->n_inputs; // set automatically
169 } else {
170 return false;
171 }
172 return true;
173}
174
180static void
181layer_args_json_export_sgd(cJSON *json, const struct ArgsLayer *args)
182{
183 cJSON_AddBoolToObject(json, "sgd_weights", args->sgd_weights);
184 if (args->sgd_weights) {
185 cJSON_AddNumberToObject(json, "eta", args->eta);
186 cJSON_AddBoolToObject(json, "evolve_eta", args->evolve_eta);
187 if (args->evolve_eta) {
188 cJSON_AddNumberToObject(json, "eta_min", args->eta_min);
189 }
190 cJSON_AddNumberToObject(json, "momentum", args->momentum);
191 cJSON_AddNumberToObject(json, "decay", args->decay);
192 }
193}
194
201static bool
202layer_args_json_import_sgd(struct ArgsLayer *args, const cJSON *json)
203{
204 if (strncmp(json->string, "sgd_weights\0", 12) == 0 && cJSON_IsBool(json)) {
205 args->sgd_weights = true ? json->type == cJSON_True : false;
206 } else if (strncmp(json->string, "eta\0", 4) == 0 && cJSON_IsNumber(json)) {
207 args->eta = json->valuedouble;
208 } else if (strncmp(json->string, "evolve_eta\0", 11) == 0 &&
209 cJSON_IsBool(json)) {
210 args->evolve_eta = true ? json->type == cJSON_True : false;
211 } else if (strncmp(json->string, "eta_min\0", 8) == 0 &&
212 cJSON_IsNumber(json)) {
213 args->eta_min = json->valuedouble;
214 } else if (strncmp(json->string, "momentum\0", 9) == 0 &&
215 cJSON_IsNumber(json)) {
216 args->momentum = json->valuedouble;
217 } else if (strncmp(json->string, "decay\0", 6) == 0 &&
218 cJSON_IsNumber(json)) {
219 args->decay = json->valuedouble;
220 } else {
221 return false;
222 }
223 return true;
224}
225
231static void
232layer_args_json_export_evo(cJSON *json, const struct ArgsLayer *args)
233{
234 cJSON_AddBoolToObject(json, "evolve_weights", args->evolve_weights);
235 cJSON_AddBoolToObject(json, "evolve_functions", args->evolve_functions);
236 cJSON_AddBoolToObject(json, "evolve_connect", args->evolve_connect);
237 cJSON_AddBoolToObject(json, "evolve_neurons", args->evolve_neurons);
238 if (args->evolve_neurons) {
239 cJSON_AddNumberToObject(json, "n_max", args->n_max);
240 cJSON_AddNumberToObject(json, "max_neuron_grow", args->max_neuron_grow);
241 }
242}
243
250static bool
251layer_args_json_import_evo(struct ArgsLayer *args, const cJSON *json)
252{
253 if (strncmp(json->string, "evolve_weights\0", 15) == 0 &&
254 cJSON_IsBool(json)) {
255 args->evolve_weights = true ? json->type == cJSON_True : false;
256 } else if (strncmp(json->string, "evolve_functions\0", 17) == 0 &&
257 cJSON_IsBool(json)) {
258 args->evolve_functions = true ? json->type == cJSON_True : false;
259 } else if (strncmp(json->string, "evolve_connect\0", 15) == 0 &&
260 cJSON_IsBool(json)) {
261 args->evolve_connect = true ? json->type == cJSON_True : false;
262 } else if (strncmp(json->string, "evolve_neurons\0", 15) == 0 &&
263 cJSON_IsBool(json)) {
264 args->evolve_neurons = true ? json->type == cJSON_True : false;
265 } else if (strncmp(json->string, "n_max\0", 6) == 0 &&
266 cJSON_IsNumber(json)) {
267 args->n_max = json->valueint;
268 } else if (strncmp(json->string, "max_neuron_grow\0", 16) == 0 &&
269 cJSON_IsNumber(json)) {
270 args->max_neuron_grow = json->valueint;
271 } else {
272 return false;
273 }
274 return true;
275}
276
282static void
283layer_args_json_export_activation(cJSON *json, const struct ArgsLayer *args)
284{
285 switch (args->type) {
286 case AVGPOOL:
287 case MAXPOOL:
288 case UPSAMPLE:
289 case DROPOUT:
290 case NOISE:
291 case SOFTMAX:
292 return;
293 default:
294 break;
295 }
296 cJSON_AddStringToObject(json, "activation",
298 if (args->type == LSTM) {
299 cJSON_AddStringToObject(
300 json, "recurrent_activation",
302 }
303}
304
311static bool
312layer_args_json_import_activation(struct ArgsLayer *args, const cJSON *json)
313{
314 if (strncmp(json->string, "activation\0", 15) == 0 &&
315 cJSON_IsString(json)) {
316 args->function = neural_activation_as_int(json->valuestring);
317 } else if (strncmp(json->string, "recurrent_activation\0", 17) == 0 &&
318 cJSON_IsString(json)) {
319 args->recurrent_function = neural_activation_as_int(json->valuestring);
320 } else {
321 return false;
322 }
323 return true;
324}
325
332static bool
333layer_args_json_export_scale(cJSON *json, const struct ArgsLayer *args)
334{
335 bool cont = false;
336 if (args->type == NOISE || args->type == DROPOUT) {
337 cJSON_AddNumberToObject(json, "probability", args->probability);
338 cont = true;
339 }
340 if (args->type == NOISE || args->type == SOFTMAX) {
341 cJSON_AddNumberToObject(json, "scale", args->scale);
342 cont = true;
343 }
344 if (args->type == MAXPOOL) {
345 cont = true;
346 }
347 return cont;
348}
349
356static bool
357layer_args_json_import_scale(struct ArgsLayer *args, const cJSON *json)
358{
359 if (strncmp(json->string, "probability\0", 15) == 0 &&
360 cJSON_IsNumber(json)) {
361 args->probability = json->valuedouble;
362 } else if (strncmp(json->string, "scale\0", 6) == 0 &&
363 cJSON_IsNumber(json)) {
364 args->scale = json->valuedouble;
365 } else {
366 return false;
367 }
368 return true;
369}
370
376char *
378{
379 struct Net net; // create a temporary network to parse inputs
380 neural_init(&net);
381 neural_create(&net, args);
382 neural_free(&net);
383 cJSON *json = cJSON_CreateObject();
384 int cnt = 0;
385 for (const struct ArgsLayer *a = args; a != NULL; a = a->next) {
386 char name[256];
387 snprintf(name, 256, "layer_%d", cnt);
388 ++cnt;
389 cJSON *l = cJSON_CreateObject();
390 cJSON_AddItemToObject(json, name, l);
391 cJSON_AddStringToObject(l, "type", layer_type_as_string(a->type));
395 continue;
396 }
397 if (a->n_init > 0) {
398 cJSON_AddNumberToObject(l, "n_init", a->n_init);
399 }
402 }
403 char *string = cJSON_Print(json);
404 cJSON_Delete(json);
405 return string;
406}
407
414char *
415layer_args_json_import(struct ArgsLayer *args, cJSON *json)
416{
417 for (cJSON *iter = json; iter != NULL; iter = iter->next) {
418 if (strncmp(iter->string, "type\0", 5) == 0 && cJSON_IsString(iter)) {
419 args->type = layer_type_as_int(iter->valuestring);
420 continue;
421 }
422 if (layer_args_json_import_activation(args, iter)) {
423 continue;
424 }
425 if (layer_args_json_import_inputs(args, iter)) {
426 continue;
427 }
428 if (layer_args_json_import_scale(args, iter)) {
429 continue;
430 }
431 if (strncmp(iter->string, "n_init\0", 7) == 0 && cJSON_IsNumber(iter)) {
432 args->n_init = iter->valueint;
433 continue;
434 }
435 if (layer_args_json_import_evo(args, iter)) {
436 continue;
437 }
438 if (layer_args_json_import_sgd(args, iter)) {
439 continue;
440 }
441 return iter->string;
442 }
443 return NULL;
444}
445
450void
452{
453 while (*largs != NULL) {
454 struct ArgsLayer *arg = *largs;
455 *largs = (*largs)->next;
456 free(arg);
457 }
458}
459
464static void
466{
467 if (arg->type == DROPOUT || arg->type == NOISE) {
468 if (arg->n_inputs < 1) {
469 arg->n_inputs = arg->channels * arg->height * arg->width;
470 } else if (arg->channels < 1 || arg->height < 1 || arg->width < 1) {
471 arg->channels = 1;
472 arg->height = 1;
473 arg->width = arg->n_inputs;
474 }
475 }
476 if (layer_receives_images(arg->type)) {
477 if (arg->channels < 1) {
478 printf("Error: input channels < 1\n");
479 exit(EXIT_FAILURE);
480 }
481 if (arg->height < 1) {
482 printf("Error: input height < 1\n");
483 exit(EXIT_FAILURE);
484 }
485 if (arg->width < 1) {
486 printf("Error: input width < 1\n");
487 exit(EXIT_FAILURE);
488 }
489 } else if (arg->n_inputs < 1) {
490 printf("Error: number of inputs < 1\n");
491 exit(EXIT_FAILURE);
492 }
493}
494
499void
501{
502 struct ArgsLayer *arg = args;
503 if (arg == NULL) {
504 printf("Error: empty layer argument list\n");
505 exit(EXIT_FAILURE);
506 }
508 do {
509 if (arg->evolve_neurons && arg->max_neuron_grow < 1) {
510 printf("Error: evolving neurons but max_neuron_grow < 1\n");
511 exit(EXIT_FAILURE);
512 }
513 if (arg->n_max < arg->n_init) {
514 arg->n_max = arg->n_init;
515 }
516 arg = arg->next;
517 } while (arg != NULL);
518}
519
525struct ArgsLayer *
527{
528 struct ArgsLayer *tail = head;
529 while (tail->next != NULL) {
530 tail = tail->next;
531 }
532 return tail;
533}
534
540uint32_t
541layer_args_opt(const struct ArgsLayer *args)
542{
543 uint32_t lopt = 0;
544 if (args->evolve_eta) {
545 lopt |= LAYER_EVOLVE_ETA;
546 }
547 if (args->sgd_weights) {
548 lopt |= LAYER_SGD_WEIGHTS;
549 }
550 if (args->evolve_weights) {
551 lopt |= LAYER_EVOLVE_WEIGHTS;
552 }
553 if (args->evolve_neurons) {
554 lopt |= LAYER_EVOLVE_NEURONS;
555 }
556 if (args->evolve_functions) {
558 }
559 if (args->evolve_connect) {
560 lopt |= LAYER_EVOLVE_CONNECT;
561 }
562 return lopt;
563}
564
570static int
571layer_args_length(const struct ArgsLayer *args)
572{
573 int n = 0;
574 const struct ArgsLayer *iter = args;
575 while (iter != NULL) {
576 iter = iter->next;
577 ++n;
578 }
579 return n;
580}
581
/**
 * @brief Saves neural network layer parameters.
 * @param [in] args Head of the layer parameter list to save.
 * @param [in] fp Pointer to the open output file.
 * @return The total number of items written.
 */
size_t
layer_args_save(const struct ArgsLayer *args, FILE *fp)
{
    size_t s = 0; // running count of fwrite items
    const int n = layer_args_length(args);
    s += fwrite(&n, sizeof(int), 1, fp); // list length first, read by load
    const struct ArgsLayer *iter = args;
    while (iter != NULL) {
        // NOTE: field order and sizes must mirror layer_args_load() exactly
        s += fwrite(&iter->type, sizeof(int), 1, fp);
        s += fwrite(&iter->n_inputs, sizeof(int), 1, fp);
        s += fwrite(&iter->n_init, sizeof(int), 1, fp);
        s += fwrite(&iter->n_max, sizeof(int), 1, fp);
        s += fwrite(&iter->max_neuron_grow, sizeof(int), 1, fp);
        s += fwrite(&iter->function, sizeof(int), 1, fp);
        s += fwrite(&iter->recurrent_function, sizeof(int), 1, fp);
        s += fwrite(&iter->height, sizeof(int), 1, fp);
        s += fwrite(&iter->width, sizeof(int), 1, fp);
        s += fwrite(&iter->channels, sizeof(int), 1, fp);
        s += fwrite(&iter->size, sizeof(int), 1, fp);
        s += fwrite(&iter->stride, sizeof(int), 1, fp);
        s += fwrite(&iter->pad, sizeof(int), 1, fp);
        s += fwrite(&iter->eta, sizeof(double), 1, fp);
        s += fwrite(&iter->eta_min, sizeof(double), 1, fp);
        s += fwrite(&iter->momentum, sizeof(double), 1, fp);
        s += fwrite(&iter->decay, sizeof(double), 1, fp);
        s += fwrite(&iter->probability, sizeof(double), 1, fp);
        s += fwrite(&iter->scale, sizeof(double), 1, fp);
        s += fwrite(&iter->evolve_weights, sizeof(bool), 1, fp);
        s += fwrite(&iter->evolve_neurons, sizeof(bool), 1, fp);
        s += fwrite(&iter->evolve_functions, sizeof(bool), 1, fp);
        s += fwrite(&iter->evolve_eta, sizeof(bool), 1, fp);
        s += fwrite(&iter->evolve_connect, sizeof(bool), 1, fp);
        s += fwrite(&iter->sgd_weights, sizeof(bool), 1, fp);
        iter = iter->next;
    }
    return s;
}
625
632size_t
633layer_args_load(struct ArgsLayer **largs, FILE *fp)
634{
635 layer_args_free(largs);
636 size_t s = 0;
637 int n = 0;
638 s += fread(&n, sizeof(int), 1, fp);
639 for (int i = 0; i < n; ++i) {
640 struct ArgsLayer *arg = malloc(sizeof(struct ArgsLayer));
641 layer_args_init(arg);
642 s += fread(&arg->type, sizeof(int), 1, fp);
643 s += fread(&arg->n_inputs, sizeof(int), 1, fp);
644 s += fread(&arg->n_init, sizeof(int), 1, fp);
645 s += fread(&arg->n_max, sizeof(int), 1, fp);
646 s += fread(&arg->max_neuron_grow, sizeof(int), 1, fp);
647 s += fread(&arg->function, sizeof(int), 1, fp);
648 s += fread(&arg->recurrent_function, sizeof(int), 1, fp);
649 s += fread(&arg->height, sizeof(int), 1, fp);
650 s += fread(&arg->width, sizeof(int), 1, fp);
651 s += fread(&arg->channels, sizeof(int), 1, fp);
652 s += fread(&arg->size, sizeof(int), 1, fp);
653 s += fread(&arg->stride, sizeof(int), 1, fp);
654 s += fread(&arg->pad, sizeof(int), 1, fp);
655 s += fread(&arg->eta, sizeof(double), 1, fp);
656 s += fread(&arg->eta_min, sizeof(double), 1, fp);
657 s += fread(&arg->momentum, sizeof(double), 1, fp);
658 s += fread(&arg->decay, sizeof(double), 1, fp);
659 s += fread(&arg->probability, sizeof(double), 1, fp);
660 s += fread(&arg->scale, sizeof(double), 1, fp);
661 s += fread(&arg->evolve_weights, sizeof(bool), 1, fp);
662 s += fread(&arg->evolve_neurons, sizeof(bool), 1, fp);
663 s += fread(&arg->evolve_functions, sizeof(bool), 1, fp);
664 s += fread(&arg->evolve_eta, sizeof(bool), 1, fp);
665 s += fread(&arg->evolve_connect, sizeof(bool), 1, fp);
666 s += fread(&arg->sgd_weights, sizeof(bool), 1, fp);
667 if (*largs == NULL) {
668 *largs = arg;
669 } else {
670 struct ArgsLayer *iter = *largs;
671 while (iter->next != NULL) {
672 iter = iter->next;
673 }
674 iter->next = arg;
675 }
676 }
677 return s;
678}
void neural_create(struct Net *net, struct ArgsLayer *arg)
Initialises and creates a new neural network from a parameter list.
Definition neural.c:54
void neural_free(struct Net *net)
Frees a neural network.
Definition neural.c:224
void neural_init(struct Net *net)
Initialises an empty neural network.
Definition neural.c:37
const char * neural_activation_string(const int a)
Returns the name of a specified activation function.
int neural_activation_as_int(const char *a)
Returns the integer representation of an activation function.
Neural network activation functions.
#define LOGISTIC
Logistic [0,1].
int layer_type_as_int(const char *type)
Returns the integer representation of a layer type given a name.
const char * layer_type_as_string(const int type)
Returns a string representation of a layer type from an integer.
bool layer_receives_images(const int type)
Returns a whether a layer type expects images as input.
#define NOISE
Layer type noise.
#define LAYER_EVOLVE_ETA
Layer may evolve rate of gradient descent.
#define LAYER_EVOLVE_FUNCTIONS
Layer may evolve functions.
#define UPSAMPLE
Layer type upsample.
#define LAYER_EVOLVE_WEIGHTS
Layer may evolve weights.
#define LSTM
Layer type LSTM.
#define SOFTMAX
Layer type softmax.
#define LAYER_EVOLVE_NEURONS
Layer may evolve neurons.
#define LAYER_EVOLVE_CONNECT
Layer may evolve connectivity.
#define AVGPOOL
Layer type average pooling.
#define DROPOUT
Layer type dropout.
#define LAYER_SGD_WEIGHTS
Layer may perform gradient descent.
#define MAXPOOL
Layer type maxpooling.
#define CONNECTED
Layer type connected.
static void layer_args_json_export_activation(cJSON *json, const struct ArgsLayer *args)
Adds layer activation function to a json object.
static bool layer_args_json_export_scale(cJSON *json, const struct ArgsLayer *args)
Adds layer scaling parameters to a json object.
static bool layer_args_json_import_inputs(struct ArgsLayer *args, const cJSON *json)
Sets the layer input parameters from a cJSON object.
size_t layer_args_load(struct ArgsLayer **largs, FILE *fp)
Loads neural network layer parameters.
void layer_args_init(struct ArgsLayer *args)
Sets layer parameters to default values.
uint32_t layer_args_opt(const struct ArgsLayer *args)
Returns a bitstring representing the permissions granted by a layer.
void layer_args_free(struct ArgsLayer **largs)
Frees memory used by a list of layer parameters and points to NULL.
size_t layer_args_save(const struct ArgsLayer *args, FILE *fp)
Saves neural network layer parameters.
void layer_args_validate(struct ArgsLayer *args)
Checks network layer arguments are valid.
static bool layer_args_json_import_evo(struct ArgsLayer *args, const cJSON *json)
Sets the layer evolutionary parameters from a cJSON object.
static void layer_args_json_export_sgd(cJSON *json, const struct ArgsLayer *args)
Adds layer gradient descent parameters to a json object.
static void layer_args_json_export_inputs(cJSON *json, const struct ArgsLayer *args)
Adds layer input parameters to a json object.
static void layer_args_validate_inputs(struct ArgsLayer *arg)
Checks input layer arguments are valid.
char * layer_args_json_export(struct ArgsLayer *args)
Returns a json formatted string of the neural layer parameters.
static bool layer_args_json_import_sgd(struct ArgsLayer *args, const cJSON *json)
Sets the layer SGD parameters from a cJSON object.
char * layer_args_json_import(struct ArgsLayer *args, cJSON *json)
Sets the layer parameters from a cJSON object.
static bool layer_args_json_import_scale(struct ArgsLayer *args, const cJSON *json)
Sets the layer scaling parameters from a cJSON object.
static void layer_args_json_export_evo(cJSON *json, const struct ArgsLayer *args)
Adds layer evolutionary parameters to a json object.
struct ArgsLayer * layer_args_tail(struct ArgsLayer *head)
Returns the current output layer arguments.
static int layer_args_length(const struct ArgsLayer *args)
Returns the length of the neural network layer parameter list.
static bool layer_args_json_import_activation(struct ArgsLayer *args, const cJSON *json)
Sets the layer activation from a cJSON object.
struct ArgsLayer * layer_args_copy(const struct ArgsLayer *src)
Creates and returns a copy of specified layer parameters.
An implementation of an average pooling layer.
An implementation of a fully-connected layer of perceptrons.
An implementation of a 2D convolutional layer.
An implementation of a dropout layer.
An implementation of a long short-term memory layer.
An implementation of a 2D maxpooling layer.
An implementation of a Gaussian noise adding layer.
An implementation of a recurrent layer of perceptrons.
An implementation of a softmax layer.
An implementation of a 2D upsampling layer.
Parameters for initialising a neural network layer.
_Bool evolve_weights
Ability to evolve weights.
int n_init
Initial number of units / neurons / filters.
_Bool evolve_neurons
Ability to evolve number of units.
_Bool sgd_weights
Ability to update weights with gradient descent.
_Bool evolve_functions
Ability to evolve activation function.
double decay
Weight decay for gradient descent.
double momentum
Momentum for gradient descent.
int function
Activation function.
_Bool evolve_eta
Ability to evolve gradient descent rate.
int max_neuron_grow
Maximum number neurons to add per mutation event.
double probability
Usage depends on layer implementation.
double eta
Gradient descent rate.
int pad
Pool and Conv.
double eta_min
Current gradient descent rate.
int channels
Pool, Conv, and Upsample.
int n_max
Maximum number of units / neurons.
double scale
Usage depends on layer implementation.
int size
Pool and Conv.
int width
Pool, Conv, and Upsample.
int height
Pool, Conv, and Upsample.
int stride
Pool, Conv, and Upsample.
int n_inputs
Number of inputs.
int type
Layer type: CONNECTED, DROPOUT, etc.
struct ArgsLayer * next
Next layer parameters.
_Bool evolve_connect
Ability to evolve weight connectivity.
int recurrent_function
Recurrent activation function.
Neural network data structure.
Definition neural.h:48
Utility functions for random number handling, etc.