XCSF 1.4.8
XCSF learning classifier system
Loading...
Searching...
No Matches
neural_layer_softmax.c
Go to the documentation of this file.
1/*
2 * This program is free software: you can redistribute it and/or modify
3 * it under the terms of the GNU General Public License as published by
4 * the Free Software Foundation, either version 3 of the License, or
5 * (at your option) any later version.
6 *
7 * This program is distributed in the hope that it will be useful,
8 * but WITHOUT ANY WARRANTY; without even the implied warranty of
9 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10 * GNU General Public License for more details.
11 *
12 * You should have received a copy of the GNU General Public License
13 * along with this program. If not, see <http://www.gnu.org/licenses/>.
14 */
15
25#include "neural_activations.h"
26#include "utils.h"
27
32static void
34{
36 l->output = calloc(l->n_outputs, sizeof(double));
37 l->delta = calloc(l->n_outputs, sizeof(double));
38}
39
44static void
45free_layer_arrays(const struct Layer *l)
46{
47 free(l->output);
48 free(l->delta);
49}
50
56void
57neural_layer_softmax_init(struct Layer *l, const struct ArgsLayer *args)
58{
59 l->scale = args->scale;
60 l->n_inputs = args->n_inputs;
61 l->n_outputs = args->n_inputs;
62 l->max_outputs = args->n_inputs;
63 l->out_w = l->n_outputs;
64 l->out_c = 1;
65 l->out_h = 1;
67}
68
74struct Layer *
76{
77 if (src->type != SOFTMAX) {
78 printf("neural_layer_softmax_copy(): incorrect source layer type\n");
79 exit(EXIT_FAILURE);
80 }
81 struct Layer *l = malloc(sizeof(struct Layer));
83 l->type = src->type;
84 l->layer_vptr = src->layer_vptr;
85 l->scale = src->scale;
86 l->n_inputs = src->n_inputs;
87 l->n_outputs = src->n_outputs;
88 l->max_outputs = src->max_outputs;
89 l->out_w = src->out_w;
90 l->out_c = src->out_c;
91 l->out_h = src->out_h;
93 return l;
94}
95
/**
 * @brief Dummy function since softmax layers have no weights to randomise.
 * @param [in] l A softmax layer (unused).
 */
void
neural_layer_softmax_rand(struct Layer *l)
{
    (void) l;
}
105
112void
113neural_layer_softmax_forward(const struct Layer *l, const struct Net *net,
114 const double *input)
115{
116 (void) net;
117 double largest = input[0];
118 for (int i = 1; i < l->n_inputs; ++i) {
119 if (input[i] > largest) {
120 largest = input[i];
121 }
122 }
123 double sum = 0;
124 for (int i = 0; i < l->n_inputs; ++i) {
125 const double e = exp((input[i] / l->scale) - (largest / l->scale));
126 sum += e;
127 l->output[i] = e;
128 }
129 for (int i = 0; i < l->n_inputs; ++i) {
130 l->output[i] /= sum;
131 }
132}
133
141void
142neural_layer_softmax_backward(const struct Layer *l, const struct Net *net,
143 const double *input, double *delta)
144{
145 (void) net;
146 (void) input;
147 if (delta) {
148 for (int i = 0; i < l->n_inputs; ++i) {
149 delta[i] += l->delta[i];
150 }
151 }
152}
153
/**
 * @brief Dummy function since a softmax layer has no weights to update.
 * @param [in] l A softmax layer (unused).
 */
void
neural_layer_softmax_update(const struct Layer *l)
{
    (void) l;
}
163
/**
 * @brief Prints a softmax layer as formatted JSON to stdout.
 * @param [in] l The layer to print.
 * @param [in] print_weights Whether to include weights (softmax has none).
 */
void
neural_layer_softmax_print(const struct Layer *l, const bool print_weights)
{
    char *json = neural_layer_softmax_json_export(l, print_weights);
    printf("%s\n", json);
    free(json); // export transfers ownership of the string
}
176
184char *
186 const bool return_weights)
187{
188 (void) return_weights;
189 cJSON *json = cJSON_CreateObject();
190 cJSON_AddStringToObject(json, "type", "softmax");
191 cJSON_AddNumberToObject(json, "n_inputs", l->n_inputs);
192 cJSON_AddNumberToObject(json, "n_outputs", l->n_outputs);
193 cJSON_AddNumberToObject(json, "temperature", l->scale);
194 char *string = cJSON_Print(json);
195 cJSON_Delete(json);
196 return string;
197}
198
/**
 * @brief Dummy function since a softmax layer cannot be mutated.
 * @param [in] l A softmax layer (unused).
 * @return False: no mutation ever occurs.
 */
bool
neural_layer_softmax_mutate(struct Layer *l)
{
    (void) l;
    return false;
}
210
216void
217neural_layer_softmax_resize(struct Layer *l, const struct Layer *prev)
218{
219 l->n_inputs = prev->n_outputs;
220 l->n_outputs = prev->n_outputs;
221 l->max_outputs = prev->n_outputs;
222 l->out_w = l->n_outputs;
223 l->out_h = 1;
224 l->out_c = 1;
227}
228
/**
 * @brief Frees the memory used by a softmax layer.
 * @param [in] l The layer to be freed.
 */
void
neural_layer_softmax_free(const struct Layer *l)
{
    free_layer_arrays(l); // releases output and delta arrays
}
238
244double *
246{
247 return l->output;
248}
249
256size_t
257neural_layer_softmax_save(const struct Layer *l, FILE *fp)
258{
259 size_t s = 0;
260 s += fwrite(&l->n_inputs, sizeof(int), 1, fp);
261 s += fwrite(&l->n_outputs, sizeof(int), 1, fp);
262 s += fwrite(&l->max_outputs, sizeof(int), 1, fp);
263 s += fwrite(&l->scale, sizeof(double), 1, fp);
264 return s;
265}
266
273size_t
275{
276 size_t s = 0;
277 s += fread(&l->n_inputs, sizeof(int), 1, fp);
278 s += fread(&l->n_outputs, sizeof(int), 1, fp);
279 s += fread(&l->max_outputs, sizeof(int), 1, fp);
280 s += fread(&l->scale, sizeof(double), 1, fp);
281 l->out_w = l->n_outputs;
282 l->out_h = 1;
283 l->out_c = 1;
285 return s;
286}
Neural network activation functions.
void layer_defaults(struct Layer *l)
Initialises a layer to default values.
void layer_guard_outputs(const struct Layer *l)
Check number of outputs is within bounds.
#define SOFTMAX
Layer type softmax.
void neural_layer_softmax_backward(const struct Layer *l, const struct Net *net, const double *input, double *delta)
Backward propagates a softmax layer.
char * neural_layer_softmax_json_export(const struct Layer *l, const bool return_weights)
Returns a json formatted string representation of a softmax layer.
void neural_layer_softmax_free(const struct Layer *l)
Free memory used by a softmax layer.
struct Layer * neural_layer_softmax_copy(const struct Layer *src)
Initialises and creates a copy of one softmax layer from another.
static void free_layer_arrays(const struct Layer *l)
Free memory used by a softmax layer.
static void malloc_layer_arrays(struct Layer *l)
Allocate memory used by a softmax layer.
size_t neural_layer_softmax_load(struct Layer *l, FILE *fp)
Reads a softmax layer from a file.
void neural_layer_softmax_print(const struct Layer *l, const bool print_weights)
Prints a softmax layer.
void neural_layer_softmax_init(struct Layer *l, const struct ArgsLayer *args)
Creates and initialises a softmax layer.
size_t neural_layer_softmax_save(const struct Layer *l, FILE *fp)
Writes a softmax layer to a file.
void neural_layer_softmax_forward(const struct Layer *l, const struct Net *net, const double *input)
Forward propagates a softmax layer.
void neural_layer_softmax_update(const struct Layer *l)
Dummy function since a softmax layer has no weights.
void neural_layer_softmax_rand(struct Layer *l)
Dummy function since softmax layers have no weights.
void neural_layer_softmax_resize(struct Layer *l, const struct Layer *prev)
Resizes a softmax layer if the previous layer has changed size.
bool neural_layer_softmax_mutate(struct Layer *l)
Dummy function since a softmax layer cannot be mutated.
double * neural_layer_softmax_output(const struct Layer *l)
Returns the output from a softmax layer.
An implementation of a softmax layer.
Parameters for initialising a neural network layer.
double scale
Usage depends on layer implementation.
int n_inputs
Number of inputs.
Neural network layer data structure.
double * output
Current neuron outputs (after activation function)
int n_inputs
Number of layer inputs.
double scale
Usage depends on layer implementation.
struct LayerVtbl const * layer_vptr
Functions acting on layers.
int max_outputs
Maximum number of neurons in the layer.
double * i
LSTM.
int n_outputs
Number of layer outputs.
int out_w
Pool, Conv, and Upsample.
int type
Layer type: CONNECTED, DROPOUT, etc.
int out_c
Pool, Conv, and Upsample.
double * delta
Delta for updating weights.
int out_h
Pool, Conv, and Upsample.
Neural network data structure.
Definition neural.h:48
Utility functions for random number handling, etc.