-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathevo.h
More file actions
324 lines (289 loc) · 13.5 KB
/
evo.h
File metadata and controls
324 lines (289 loc) · 13.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
#ifndef EVOMORPH_H
#define EVOMORPH_H
#include "kickstart.h"
#include <time.h>
#include <stdio.h>
// Compile-time switches: when defined, the training loops presumably write
// per-epoch loss values to a file — confirm in evo.c.
//#define WRITE_LOSS
//#define WRITE_LOSS_GENETIC
#define TRAIN_PRUNE_LOSS
#define ABLATION_LOSS
#ifdef __SSE__
#include <xmmintrin.h>
// Vectorized exp(-x) and tanh(x) over two packed doubles (defined in evo.c).
__m128d exp_neg_pd(__m128d x);
__m128d tanh_pd(__m128d x);
#endif
// Byte alignment required for SSE loads/stores of double vectors.
#define SSE_ALIGNMENT 16
// Doubles per 128-byte chunk (16). Parenthesized so the macro expands as a
// single value inside larger expressions — the original unparenthesized
// `128/sizeof(double)` misparsed uses such as `x / DOUBLE_DIV`.
#define DOUBLE_DIV (128/sizeof(double))
// Scratch pool size in bytes (16 MiB).
#define TEMP_POOL_SIZE 0x1000000
// Coefficient of the GELU tanh approximation.
#define GELU_C 0.044715
// SELU fixed-point constants (Klambauer et al., "Self-Normalizing Neural Networks").
#define SELU_LAMBDA 1.0507009873554804934193349852946
#define SELU_ALPHA 1.6732632423543772848170429916717
// Function-pointer signatures for the activation / loss / initializer
// dispatch tables; parameter roles are defined by the kernels in evo.c.
typedef void (*activation_function)(double* const, const double* const, uint64_t, double);
typedef double (*loss_function)(double* const, const double* const, const double* const, uint64_t, double);
typedef void (*loss_derivative)(double* const, const double* const, const double* const, uint64_t, double);
typedef void (*bias_init)(double* const, uint64_t, double, double);
typedef void (*weight_init)(double** const, uint64_t, uint64_t, double, double);
typedef void (*layer_weight_init)(double* const, uint64_t, double, double);
// Loss-function selector stored per network; enumerators index the loss
// dispatch table and are apparently mirrored one-to-one by LOSS_PARTIAL_FUNC
// — keep the two enums in the same order.
typedef enum LOSS_FUNC {
LOSS_MSE,
LOSS_MAE,
LOSS_MAPE,
LOSS_HUBER,
LOSS_HUBER_MODIFIED,
LOSS_CROSS_ENTROPY,
LOSS_HINGE
} LOSS_FUNC;
// Activation selector stored per layer; enumerators index the activation
// dispatch table and are apparently mirrored one-to-one by
// ACTIVATION_PARTIAL_FUNC — keep the two enums in the same order.
typedef enum ACTIVATION_FUNC {
ACTIVATION_SIGMOID,
ACTIVATION_RELU,
ACTIVATION_TANH,
ACTIVATION_LINEAR,
ACTIVATION_RELU_LEAKY,
ACTIVATION_RELU_PARAMETRIC,
ACTIVATION_ELU,
ACTIVATION_SOFTMAX,
ACTIVATION_SWISH,
ACTIVATION_GELU,
ACTIVATION_SELU
} ACTIVATION_FUNC;
// Number of activation kinds. Parenthesized so the macro behaves as a single
// value in surrounding expressions — the original `ACTIVATION_SELU+1`
// expansion broke uses like `2 * ACTIVATION_COUNT` (21 instead of 22).
#define ACTIVATION_COUNT (ACTIVATION_SELU+1)
// Bias-initialization strategy; dispatches to the bias_initialization_*
// functions declared below.
typedef enum BIAS_FUNC {
BIAS_INITIALIZATION_ZERO,
BIAS_INITIALIZATION_CONST_FLAT,
BIAS_INITIALIZATION_CONST_UNEVEN,
} BIAS_FUNC;
// Weight-initialization strategy; dispatches to the weight_initialization_*
// functions declared below.
typedef enum WEIGHT_FUNC {
WEIGHT_INITIALIZATION_XAVIER,
WEIGHT_INITIALIZATION_HE,
WEIGHT_INITIALIZATION_LECUN,
WEIGHT_INITIALIZATION_UNIFORM,
WEIGHT_INITIALIZATION_NORMAL,
} WEIGHT_FUNC;
// Layer-weight (inter-layer edge weight) initialization strategy; dispatches
// to the layer_weight_initialization_* functions declared below. Only used
// when network.layers_weighted is set — TODO confirm in evo.c.
typedef enum LAYER_WEIGHT_FUNC {
LAYER_WEIGHT_INITIALIZATION_UNIFORM,
LAYER_WEIGHT_INITIALIZATION_NORMAL,
LAYER_WEIGHT_INITIALIZATION_STRONG,
LAYER_WEIGHT_INITIALIZATION_PARAMETRIC
} LAYER_WEIGHT_FUNC;
// Loss-derivative selector; enumerators apparently mirror LOSS_FUNC
// one-to-one — keep the two enums in the same order so a loss and its
// derivative can share an index.
typedef enum LOSS_PARTIAL_FUNC {
LOSS_MSE_PARTIAL,
LOSS_MAE_PARTIAL,
LOSS_MAPE_PARTIAL,
LOSS_HUBER_PARTIAL,
LOSS_HUBER_MODIFIED_PARTIAL,
LOSS_CROSS_ENTROPY_PARTIAL,
LOSS_HINGE_PARTIAL,
} LOSS_PARTIAL_FUNC;
// Activation-derivative selector; enumerators apparently mirror
// ACTIVATION_FUNC one-to-one — keep the two enums in the same order so an
// activation and its derivative can share an index.
typedef enum ACTIVATION_PARTIAL_FUNC {
ACTIVATION_SIGMOID_PARTIAL,
ACTIVATION_RELU_PARTIAL,
ACTIVATION_TANH_PARTIAL,
ACTIVATION_LINEAR_PARTIAL,
ACTIVATION_RELU_LEAKY_PARTIAL,
ACTIVATION_RELU_PARAMETRIC_PARTIAL,
ACTIVATION_ELU_PARTIAL,
ACTIVATION_SOFTMAX_PARTIAL,
ACTIVATION_SWISH_PARTIAL,
ACTIVATION_GELU_PARTIAL,
ACTIVATION_SELU_PARTIAL
} ACTIVATION_PARTIAL_FUNC;
// Forward typedef so prototypes and self-referencing fields can use `layer`.
typedef struct layer layer;
// One node of the network graph: a tagged union that is either a trainable
// layer (LAYER_NODE) or the network input (INPUT_NODE). Graph edges are
// stored as indices into network.nodes, not as pointers.
// NOTE: the original repeated the typedef on the struct definition; a typedef
// redeclaration is a constraint violation in C99 (legal only since C11), so
// the definition now uses the plain struct tag.
struct layer {
uint64_t* prev; // indices of upstream nodes (order matches weights)
uint64_t prev_count;
uint64_t prev_capacity;
uint64_t* next; // indices of downstream nodes
uint64_t next_count;
uint64_t next_capacity;
uint64_t pass_index; // last graph-traversal pass that visited this node
uint64_t back_direction; // backward-traversal bookkeeping — TODO confirm semantics in evo.c
union {
struct {
uint64_t width; // number of units in this layer
double* output; // raw weighted sums; `activated` presumably holds post-activation values
double* activated;
// width * number of input weights, in same order as layer** prev
double** weights;
double* bias;
double** weight_gradients;
double* bias_gradients;
double* activation_gradients;
double* prev_weights; // per-incoming-edge weights (used when layers_weighted — confirm)
double* prev_weight_gradients;
double parameter_a; // activation parameter (e.g. leaky/parametric slope)
uint64_t gradient_count; // accumulated gradient samples in the current batch — confirm
ACTIVATION_FUNC activation;
ACTIVATION_PARTIAL_FUNC derivative; // must correspond to `activation` — keep in sync
} layer;
struct {
uint64_t width;
double* output; // caller-provided input vector
} input;
} data;
enum {
LAYER_NODE,
INPUT_NODE,
} tag;
uint8_t simulated; // traversal flag (see reset_simulation_flags)
uint8_t branched;
};
// A complete network: the node graph plus training hyper-parameters and the
// initializer/loss selections. All storage comes from the caller-supplied
// pool `mem`; `temp` appears to be per-pass scratch space (see TEMP_POOL_SIZE).
typedef struct network {
pool* mem;
pool temp;
layer** nodes; // all graph nodes; edge lists store indices into this array
uint64_t node_count;
uint64_t node_capacity;
layer* input; // entry node (INPUT_NODE)
layer* output; // final node whose activations feed the loss
LOSS_FUNC loss;
LOSS_PARTIAL_FUNC derivative; // must correspond to `loss` — keep in sync
double* loss_output;
BIAS_FUNC bias;
WEIGHT_FUNC weight;
LAYER_WEIGHT_FUNC layer_weight;
ACTIVATION_FUNC prune; // NOTE(review): prune policy stored as an activation id — confirm intent in evo.c
uint64_t batch_size;
double learning_rate;
double loss_parameter_a; // extra parameter for parameterized losses (e.g. Huber delta) — confirm
double weight_parameter_a;
double weight_parameter_b;
double bias_parameter_a;
double bias_parameter_b;
double prev_parameter_a; // layer-weight initializer parameters — confirm
double prev_parameter_b;
double prune_parameter_a;
double gradient_clamp; // magnitude limit applied by clamp_gradient(s)
uint8_t layers_weighted; // when set, inter-layer edge weights are trained — confirm
} network;
/* --- Network construction and training ----------------------------------- */
// Builds a network descriptor over caller-owned pools; the scalar pairs feed
// the weight/bias/layer-weight/prune initializers selected by w/b/lw/prune.
network network_init(pool* const mem, layer* const input, layer* const output, WEIGHT_FUNC w, BIAS_FUNC b, LAYER_WEIGHT_FUNC lw, ACTIVATION_FUNC prune, double weight_a, double weight_b, double bias_a, double bias_b, double prev_a, double prev_b, double prune_a, uint64_t batch_size, double learning_rate, double clamp, LOSS_FUNC l);
// Registers a node in net->nodes; the returned value is presumably the node's
// index, which the layer_link* functions take as `a`/`b` — confirm in evo.c.
uint64_t network_register_layer(network* const net, layer* const node);
void reset_pass_index(network* const net);
// Finalizes the graph after linking (presumably sorting connections and
// allocating weights) — confirm in evo.c.
void network_build(network* const net);
layer* input_init(pool* const mem, uint64_t width);
layer* layer_init(pool* const mem, uint64_t width, ACTIVATION_FUNC activation, double parameter_a);
/* --- Graph editing; a, b, c are node indices ------------------------------ */
void layer_link(network* const net, pool* const mem, uint64_t a, uint64_t b);
void layer_link_backward(network* const net, pool* const mem, uint64_t a, uint64_t b);
void layer_unlink(network* const net, uint64_t a, uint64_t b);
void layer_insert(network* const net, pool* const mem, uint64_t a, uint64_t b, uint64_t c);
/* --- Traversal / training internals -------------------------------------- */
void reset_simulation_flags(network* const net, layer* const node);
void sort_connections(network* const net, layer* const prev, layer* const node, uint64_t pass_index);
void allocate_node_weights(network* const net, pool* const mem, layer* const node);
void allocate_weights(network* const net, pool* const mem, layer* const node, uint64_t pass_index);
void clamp_gradient(network* const net, double* item);
void clamp_gradients(network* const net, double* const vector, uint64_t size);
void forward(network* const net, layer* const node, uint64_t pass_index);
void backward(network* const net, layer* const node);
void apply_gradients(network* const net, layer* const node, uint64_t pass_index);
void zero_gradients(network* const net, layer* const node, uint64_t pass_index);
void clear_activation_gradients(network* const net, layer* const node, uint64_t pass_index);
// Trains on `data_size` samples and returns a loss figure (average or total —
// confirm in evo.c).
double network_train(network* const net, double** data, uint64_t data_size, double** expected);
void init_params(network* const net, layer* const node, uint64_t pass_index);
/* --- RNG helpers used by the initializers --------------------------------- */
void set_seed(time_t seed);
double uniform_distribution(double min, double max);
double normal_distribution(double mean, double std);
/* --- Loss kernels. Shared signature (out, a, b, n, param); exact roles of
   the two input vectors (predicted vs expected) are defined in evo.c —
   verify there before relying on an order. Return value is the aggregate
   loss for the n elements. ------------------------------------------------ */
double loss_mse(double* const, const double* const, const double* const, uint64_t, double);
double loss_mae(double* const, const double* const, const double* const, uint64_t, double);
double loss_mape(double* const, const double* const, const double* const, uint64_t, double);
double loss_huber(double* const, const double* const, const double* const, uint64_t, double);
double loss_huber_modified(double* const, const double* const, const double* const, uint64_t, double);
double loss_cross_entropy(double* const, const double* const, const double* const, uint64_t, double);
double loss_hinge(double* const, const double* const, const double* const, uint64_t, double);
/* --- Activation kernels: (out, in, n, parameter_a). NOTE(review):
   activation_binary_step has no ACTIVATION_FUNC enumerator, so it cannot be
   selected through the enum — dead API or intentional extra? ------------- */
void activation_sigmoid(double* const, const double* const, uint64_t, double);
void activation_relu(double* const, const double* const, uint64_t, double);
void activation_tanh(double* const, const double* const, uint64_t, double);
void activation_binary_step(double* const, const double* const, uint64_t, double);
void activation_linear(double* const, const double* const, uint64_t, double);
void activation_relu_leaky(double* const, const double* const, uint64_t, double);
void activation_relu_parametric(double* const, const double* const, uint64_t, double);
void activation_elu(double* const, const double* const, uint64_t, double);
void activation_softmax(double* const, const double* const, uint64_t, double);
void activation_swish(double* const, const double* const, uint64_t, double);
void activation_gelu(double* const, const double* const, uint64_t, double);
void activation_selu(double* const, const double* const, uint64_t, double);
/* --- Bias initializers: (bias vector, n, param_a, param_b) ---------------- */
void bias_initialization_zero(double* const, uint64_t, double, double);
void bias_initialization_const_flat(double* const, uint64_t, double, double);
void bias_initialization_const_uneven(double* const, uint64_t, double, double);
/* --- Weight initializers: (weight matrix, two extents, param_a, param_b);
   extent meanings (fan-in/fan-out?) defined in evo.c. --------------------- */
void weight_initialization_xavier(double** const, uint64_t, uint64_t, double, double);
void weight_initialization_he(double** const, uint64_t, uint64_t, double, double);
void weight_initialization_lecun(double** const, uint64_t, uint64_t, double, double);
void weight_initialization_uniform(double** const, uint64_t, uint64_t, double, double);
void weight_initialization_normal(double** const, uint64_t, uint64_t, double, double);
/* --- Layer-weight initializers: (vector, n, param_a, param_b) ------------- */
void layer_weight_initialization_uniform(double* const, uint64_t, double, double);
void layer_weight_initialization_normal(double* const, uint64_t, double, double);
void layer_weight_initialization_strong(double* const, uint64_t, double, double);
void layer_weight_initialization_parametric(double* const, uint64_t, double, double);
/* --- Loss derivatives; order mirrors LOSS_FUNC ---------------------------- */
void loss_mse_partial(double* const, const double* const, const double* const, uint64_t, double);
void loss_mae_partial(double* const, const double* const, const double* const, uint64_t, double);
void loss_mape_partial(double* const, const double* const, const double* const, uint64_t, double);
void loss_huber_partial(double* const, const double* const, const double* const, uint64_t, double);
void loss_huber_modified_partial(double* const, const double* const, const double* const, uint64_t, double);
void loss_cross_entropy_partial(double* const, const double* const, const double* const, uint64_t, double);
void loss_hinge_partial(double* const, const double* const, const double* const, uint64_t, double);
/* --- Activation derivatives; order mirrors ACTIVATION_FUNC ---------------- */
void activation_sigmoid_partial(double* const, const double* const, uint64_t, double);
void activation_relu_partial(double* const, const double* const, uint64_t, double);
void activation_tanh_partial(double* const, const double* const, uint64_t, double);
void activation_linear_partial(double* const, const double* const, uint64_t, double);
void activation_relu_leaky_partial(double* const, const double* const, uint64_t, double);
void activation_relu_parametric_partial(double* const, const double* const, uint64_t, double);
void activation_elu_partial(double* const, const double* const, uint64_t, double);
void activation_softmax_partial(double* const, const double* const, uint64_t, double);
void activation_swish_partial(double* const, const double* const, uint64_t, double);
void activation_gelu_partial(double* const, const double* const, uint64_t, double);
void activation_selu_partial(double* const, const double* const, uint64_t, double);
/* --- Serialization: save/load the full network to/from a file ------------- */
void write_node(network* const net, layer* const node, FILE* outfile);
void write_network(network* const net, const char* filename);
// NOTE(review): the second parameter is a pool yet is named `node` — likely a
// copy-paste slip; consider renaming it `mem` here and in evo.c for clarity.
void load_nodes(network* const net, pool* const node, FILE* infile);
network load_network(pool* const mem, const char* filename);
// Single classification result: winning class index and its probability.
// NOTE(review): `class` is a C++ keyword, so this header cannot be included
// from C++ translation units; rename the member if that ever matters.
typedef struct prediction {
uint64_t class;
double probability;
} prediction;
// Batched classification results: parallel arrays of length `len`
// (class[i] pairs with probability[i]).
typedef struct prediction_vector {
uint64_t* class;
double* probability;
uint64_t len;
} prediction_vector;
/* --- Inference ------------------------------------------------------------ */
prediction predict(network* const net, double* input, uint64_t len);
prediction_vector predict_vector(network* const net, pool* const mem, double** input, uint64_t vector_len, uint64_t len);
prediction_vector predict_vector_batched(network* const net, pool* const mem, double*** input, uint64_t sample_count, uint64_t vector_len, uint64_t len);
// Debug dump of the network structure.
void network_show(network* const net);
/* --- Pruning and structural growth ---------------------------------------- */
void network_prune(network* const net);
void network_compose_layer(network* const net, layer* const node);
void update_layer_connection_data(network* const net, layer* const node, uint64_t target_id);
// Train/prune/grow loops; prune_epoch and grow_epoch presumably schedule when
// pruning and growth happen within `epochs` — confirm in evo.c.
void grow_network(network* const net, double** training_data, uint64_t samples, double** expected, uint64_t epochs, uint64_t prune_epoch, uint64_t grow_epoch);
void grow_network_retrain(network* const net, double** training_data, uint64_t samples, double** expected, uint64_t epochs, uint64_t prune_epoch, uint64_t grow_epoch);
layer* grow_layer(pool* const mem);
void network_rebuild(network* const net);
void grow_network_sparse(network* const net, double** training_data, uint64_t samples, double** expected, uint64_t epochs, uint64_t prune_epoch, uint64_t grow_epoch);
// NOTE(review): 20+ parameters — consider bundling into a params struct in a
// future interface revision (cf. supergraph_param_set).
void grow_mutation(pool* const mem, double** training_data, uint64_t samples, double** expected, uint64_t epochs, uint64_t prune_epoch, uint64_t grow_epoch, uint64_t fork_count, uint64_t mutation_count, uint64_t initial_depth, uint8_t retrain, uint8_t layers_weighted, uint64_t layer_width, uint64_t batch_size, double learning_rate, WEIGHT_FUNC weight, BIAS_FUNC bias, LAYER_WEIGHT_FUNC layer_weight, ACTIVATION_FUNC prune, ACTIVATION_FUNC node_activation);
/* --- Deep copies and architecture search ---------------------------------- */
layer* deep_copy_node(network* const source_net, network* const net, layer* const source, pool* const mem);
network* deep_copy_network(network* const source, pool* const mem);
void reallocate_weights(network* const net);
void mutation_search_exhaustive(pool* const mem, double** training_data, uint64_t samples, double** expected);
void supergraph_compose(network* const net, ACTIVATION_FUNC node_activation, uint64_t layer_width, double layer_param, uint64_t width, uint64_t depth, uint8_t full_compose);
// Train/prune loop that logs to outfile; returns the final loss — confirm.
double network_train_prune_loop(FILE* outfile, network* const net, double** training_data, uint64_t samples, double** expected, uint64_t epochs, uint64_t prune_epoch);
// Bundled hyper-parameters for supergraph experiments (see supergraph_compose
// and network_init, whose parameters these fields presumably map onto).
typedef struct supergraph_param_set {
char* filename; // output file path — TODO confirm what is written there
WEIGHT_FUNC wi; // weight initializer
BIAS_FUNC bi; // bias initializer
LAYER_WEIGHT_FUNC li; // layer-weight initializer
ACTIVATION_FUNC prune;
ACTIVATION_FUNC activation;
LOSS_FUNC loss;
double wa; // presumably weight init params a/b
double wb;
double ba; // presumably bias init params a/b
double bb;
double la; // presumably layer-weight init params a/b
double lb;
double pa; // presumably prune parameter a
double layer_param; // activation parameter_a for composed layers
uint64_t epochs;
uint64_t prune_epoch;
uint64_t supergraph_width;
uint64_t supergraph_depth;
uint64_t layer_width;
uint8_t layers_weighted;
uint8_t full_compose;
} supergraph_param_set;
#endif