XCSF 1.4.7 — XCSF learning classifier system.
neural_layer_softmax.c: an implementation of a softmax output layer.
1 /*
2  * This program is free software: you can redistribute it and/or modify
3  * it under the terms of the GNU General Public License as published by
4  * the Free Software Foundation, either version 3 of the License, or
5  * (at your option) any later version.
6  *
7  * This program is distributed in the hope that it will be useful,
8  * but WITHOUT ANY WARRANTY; without even the implied warranty of
9  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
10  * GNU General Public License for more details.
11  *
12  * You should have received a copy of the GNU General Public License
13  * along with this program. If not, see <http://www.gnu.org/licenses/>.
14  */
15 
24 #include "neural_layer_softmax.h"
25 #include "neural_activations.h"
26 #include "utils.h"
27 
32 static void
34 {
36  l->output = calloc(l->n_outputs, sizeof(double));
37  l->delta = calloc(l->n_outputs, sizeof(double));
38 }
39 
44 static void
45 free_layer_arrays(const struct Layer *l)
46 {
47  free(l->output);
48  free(l->delta);
49 }
50 
56 void
57 neural_layer_softmax_init(struct Layer *l, const struct ArgsLayer *args)
58 {
59  l->scale = args->scale;
60  l->n_inputs = args->n_inputs;
61  l->n_outputs = args->n_inputs;
62  l->max_outputs = args->n_inputs;
63  l->out_w = l->n_outputs;
64  l->out_c = 1;
65  l->out_h = 1;
67 }
68 
74 struct Layer *
75 neural_layer_softmax_copy(const struct Layer *src)
76 {
77  if (src->type != SOFTMAX) {
78  printf("neural_layer_softmax_copy(): incorrect source layer type\n");
79  exit(EXIT_FAILURE);
80  }
81  struct Layer *l = malloc(sizeof(struct Layer));
82  layer_defaults(l);
83  l->type = src->type;
84  l->layer_vptr = src->layer_vptr;
85  l->scale = src->scale;
86  l->n_inputs = src->n_inputs;
87  l->n_outputs = src->n_outputs;
88  l->max_outputs = src->max_outputs;
89  l->out_w = src->out_w;
90  l->out_c = src->out_c;
91  l->out_h = src->out_h;
93  return l;
94 }
95 
/**
 * @brief Dummy function since softmax layers have no weights to randomise.
 * @param [in] l A softmax layer.
 */
void
neural_layer_softmax_rand(struct Layer *l)
{
    (void) l;
}
105 
112 void
113 neural_layer_softmax_forward(const struct Layer *l, const struct Net *net,
114  const double *input)
115 {
116  (void) net;
117  double largest = input[0];
118  for (int i = 1; i < l->n_inputs; ++i) {
119  if (input[i] > largest) {
120  largest = input[i];
121  }
122  }
123  double sum = 0;
124  for (int i = 0; i < l->n_inputs; ++i) {
125  const double e = exp((input[i] / l->scale) - (largest / l->scale));
126  sum += e;
127  l->output[i] = e;
128  }
129  for (int i = 0; i < l->n_inputs; ++i) {
130  l->output[i] /= sum;
131  }
132 }
133 
141 void
142 neural_layer_softmax_backward(const struct Layer *l, const struct Net *net,
143  const double *input, double *delta)
144 {
145  (void) net;
146  (void) input;
147  if (delta) {
148  for (int i = 0; i < l->n_inputs; ++i) {
149  delta[i] += l->delta[i];
150  }
151  }
152 }
153 
/**
 * @brief Dummy function since a softmax layer has no weights to update.
 * @param [in] l A softmax layer.
 */
void
neural_layer_softmax_update(const struct Layer *l)
{
    (void) l;
}
163 
/**
 * @brief Prints a softmax layer as formatted JSON.
 * @param [in] l The layer to print.
 * @param [in] print_weights Whether to include weights (softmax has none).
 */
void
neural_layer_softmax_print(const struct Layer *l, const bool print_weights)
{
    char *json = neural_layer_softmax_json_export(l, print_weights);
    printf("%s\n", json);
    free(json); /* json_export transfers ownership of the string */
}
176 
184 char *
186  const bool return_weights)
187 {
188  (void) return_weights;
189  cJSON *json = cJSON_CreateObject();
190  cJSON_AddStringToObject(json, "type", "softmax");
191  cJSON_AddNumberToObject(json, "n_inputs", l->n_inputs);
192  cJSON_AddNumberToObject(json, "n_outputs", l->n_outputs);
193  cJSON_AddNumberToObject(json, "temperature", l->scale);
194  char *string = cJSON_Print(json);
195  cJSON_Delete(json);
196  return string;
197 }
198 
/**
 * @brief Dummy function since a softmax layer cannot be mutated.
 * @param [in] l A softmax layer.
 * @return False: no mutation ever occurs.
 */
bool
neural_layer_softmax_mutate(struct Layer *l)
{
    (void) l;
    return false;
}
210 
216 void
217 neural_layer_softmax_resize(struct Layer *l, const struct Layer *prev)
218 {
219  l->n_inputs = prev->n_outputs;
220  l->n_outputs = prev->n_outputs;
221  l->max_outputs = prev->n_outputs;
222  l->out_w = l->n_outputs;
223  l->out_h = 1;
224  l->out_c = 1;
227 }
228 
/**
 * @brief Free memory used by a softmax layer.
 * @param [in] l The layer to be freed.
 */
void
neural_layer_softmax_free(const struct Layer *l)
{
    free_layer_arrays(l);
}
238 
244 double *
246 {
247  return l->output;
248 }
249 
256 size_t
257 neural_layer_softmax_save(const struct Layer *l, FILE *fp)
258 {
259  size_t s = 0;
260  s += fwrite(&l->n_inputs, sizeof(int), 1, fp);
261  s += fwrite(&l->n_outputs, sizeof(int), 1, fp);
262  s += fwrite(&l->max_outputs, sizeof(int), 1, fp);
263  s += fwrite(&l->scale, sizeof(double), 1, fp);
264  return s;
265 }
266 
273 size_t
274 neural_layer_softmax_load(struct Layer *l, FILE *fp)
275 {
276  size_t s = 0;
277  s += fread(&l->n_inputs, sizeof(int), 1, fp);
278  s += fread(&l->n_outputs, sizeof(int), 1, fp);
279  s += fread(&l->max_outputs, sizeof(int), 1, fp);
280  s += fread(&l->scale, sizeof(double), 1, fp);
281  l->out_w = l->n_outputs;
282  l->out_h = 1;
283  l->out_c = 1;
285  return s;
286 }
Neural network activation functions.
void layer_defaults(struct Layer *l)
Initialises a layer to default values.
Definition: neural_layer.c:413
void layer_guard_outputs(const struct Layer *l)
Check number of outputs is within bounds.
Definition: neural_layer.c:595
#define SOFTMAX
Layer type softmax.
Definition: neural_layer.h:32
double * neural_layer_softmax_output(const struct Layer *l)
Returns the output from a softmax layer.
void neural_layer_softmax_backward(const struct Layer *l, const struct Net *net, const double *input, double *delta)
Backward propagates a softmax layer.
void neural_layer_softmax_free(const struct Layer *l)
Free memory used by a softmax layer.
static void free_layer_arrays(const struct Layer *l)
Free memory used by a softmax layer.
static void malloc_layer_arrays(struct Layer *l)
Allocate memory used by a softmax layer.
char * neural_layer_softmax_json_export(const struct Layer *l, const bool return_weights)
Returns a json formatted string representation of a softmax layer.
struct Layer * neural_layer_softmax_copy(const struct Layer *src)
Initialises and creates a copy of one softmax layer from another.
size_t neural_layer_softmax_load(struct Layer *l, FILE *fp)
Reads a softmax layer from a file.
void neural_layer_softmax_print(const struct Layer *l, const bool print_weights)
Prints a softmax layer.
void neural_layer_softmax_init(struct Layer *l, const struct ArgsLayer *args)
Creates and initialises a softmax layer.
size_t neural_layer_softmax_save(const struct Layer *l, FILE *fp)
Writes a softmax layer to a file.
void neural_layer_softmax_forward(const struct Layer *l, const struct Net *net, const double *input)
Forward propagates a softmax layer.
void neural_layer_softmax_update(const struct Layer *l)
Dummy function since a softmax layer has no weights.
void neural_layer_softmax_rand(struct Layer *l)
Dummy function since softmax layers have no weights.
void neural_layer_softmax_resize(struct Layer *l, const struct Layer *prev)
Resizes a softmax layer if the previous layer has changed size.
bool neural_layer_softmax_mutate(struct Layer *l)
Dummy function since a softmax layer cannot be mutated.
An implementation of a softmax layer.
Parameters for initialising a neural network layer.
double scale
Usage depends on layer implementation.
int n_inputs
Number of inputs.
Neural network layer data structure.
Definition: neural_layer.h:73
double * output
Current neuron outputs (after activation function)
Definition: neural_layer.h:76
int n_inputs
Number of layer inputs.
Definition: neural_layer.h:90
double scale
Usage depends on layer implementation.
Definition: neural_layer.h:98
struct LayerVtbl const * layer_vptr
Functions acting on layers.
Definition: neural_layer.h:100
int max_outputs
Maximum number of neurons in the layer.
Definition: neural_layer.h:92
double * i
LSTM.
Definition: neural_layer.h:117
int n_outputs
Number of layer outputs.
Definition: neural_layer.h:91
int out_w
Pool, Conv, and Upsample.
Definition: neural_layer.h:130
int type
Layer type: CONNECTED, DROPOUT, etc.
Definition: neural_layer.h:74
int out_c
Pool, Conv, and Upsample.
Definition: neural_layer.h:132
double * delta
Delta for updating weights.
Definition: neural_layer.h:83
int out_h
Pool, Conv, and Upsample.
Definition: neural_layer.h:131
Neural network data structure.
Definition: neural.h:48
Utility functions for random number handling, etc.