-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathLayer.h
More file actions
93 lines (77 loc) · 1.67 KB
/
Layer.h
File metadata and controls
93 lines (77 loc) · 1.67 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
//
// Created by bobby on 2.12.16.
//
#ifndef FULLCONNECTEDNEURALNET_LAYER_H
#define FULLCONNECTEDNEURALNET_LAYER_H
#include <cmath>     // exp() — used by the sigmoid activation below
#include <cstdlib>   // rand(), RAND_MAX — used by fRand() (was missing; <algorithm> does NOT declare rand)
#include <algorithm> // kept for compatibility with existing includers of this header

#define INIT_MAX 0.1   // upper bound for random weight initialisation
#define INIT_MIN -0.1  // lower bound for random weight initialisation
#define LR 0.001       // learning rate

/**
 * @brief Uniform pseudo-random double in [fMin, fMax].
 *
 * Used to initialise network weights (typically with INIT_MIN/INIT_MAX).
 * Relies on the C library rand(); seed with srand() elsewhere if a
 * non-default sequence is required.
 *
 * @param fMin lower bound of the range (inclusive)
 * @param fMax upper bound of the range (inclusive)
 * @return a value in [fMin, fMax]
 */
static double fRand(double fMin, double fMax)
{
    double f = (double)rand() / RAND_MAX; // f in [0, 1]
    return fMin + f * (fMax - fMin);
}
/**
 * @brief Logistic (sigmoid) activation function: 1 / (1 + e^(-x)).
 *
 * @param x pre-activation input value
 * @return activation in the open interval (0, 1); sigma(0) == 0.5
 */
static double sigma(double x) {
    const double neg_exp = exp(-x);
    return 1.0 / (1.0 + neg_exp);
}
/**
 * @brief Abstract base class for one layer of a fully connected neural net.
 *
 * Concrete layers implement the forward pass, backpropagation, the weight
 * update and input injection. Layers are chained into a network through the
 * up/down pointers (doubly linked list of layers).
 */
class Layer {
public:
    /**
     * @brief number of neurons in this layer (n), in the lower layer (in),
     * and in the upper layer (ou)
     */
    int n, in, ou;
    /**
     * @brief pointer to the upper layer, NULL if this layer is the output
     */
    Layer* up;
    /**
     * @brief pointer to the lower layer, NULL if this layer is the input
     */
    Layer* down;
    /**
     * @brief output of neurons
     */
    double* out;
    /**
     * @brief ddot of neurons — presumably the backpropagated error/delta
     * term; TODO(review) confirm against the concrete implementations
     */
    double* ddot;
    /**
     * @brief weights of neurons
     */
    double* w;
    /**
     * @brief bias of layer
     */
    double* bias;
    double* input;     // raw input buffer — set via update_input()
    double* down_ddot; // NOTE(review): looks like ddot received from the lower layer — verify in subclasses
    int depth;         // NOTE(review): semantics not visible here — verify in subclasses
    int input_dim;     // NOTE(review): likely dimensionality of the input — verify in subclasses
    int dim;           // NOTE(review): semantics not visible here — verify in subclasses

    /**
     * @brief Virtual destructor so derived layers are destroyed correctly
     * when deleted through a Layer* pointer (deleting a derived object
     * through a base pointer without one is undefined behavior).
     */
    virtual ~Layer() {}
    /**
     * @brief backpropagation
     */
    virtual void backProp_layer() = 0;
    /**
     * @brief forward pass
     */
    virtual void forward_layer() = 0;
    /**
     * @brief changing weights according to ddot
     */
    virtual void learn() = 0;
    /**
     * @brief insert new values to input
     * @param in pointer to the new input values (ownership not transferred
     * here — TODO(review) confirm who frees this buffer)
     */
    virtual void update_input(double* in) = 0;
    /**
     * @brief print the layer state (for debugging)
     */
    virtual void print() = 0;
};
#endif //FULLCONNECTEDNEURALNET_LAYER_H