nn.h
#ifndef __NN_H__
#define __NN_H__
#define f_type double
#include <Eigen/Core>
#include <vector>
namespace nnet
{
typedef Eigen::Matrix<f_type, Eigen::Dynamic, Eigen::Dynamic> matrix_t;
typedef Eigen::Matrix<f_type, Eigen::Dynamic, 1> vector_t;
typedef Eigen::Array<f_type, Eigen::Dynamic, Eigen::Dynamic> array_t;
struct nn_layer
{
    size_t size;          /**< Number of units in this layer. */
    matrix_t a, z, delta; /**< Activations, weighted inputs and backpropagated deltas. */
    matrix_t W, dEdW;     /**< Weights and error gradient w.r.t. the weights. */
    vector_t b;           /**< Biases. */
};
struct train_param
{
    /** Step/damping parameter (mu) with its upper bound and scale factor,
     *  and stopping thresholds on the gradient and the loss. */
    f_type mu, mu_max, mu_scale, min_grad, min_loss;
    /** Maximum number of training iterations. */
    int max_iter;
};
class neural_net
{
private:
    /** Allocate memory and initialize default values. */
    void init_layers(const Eigen::VectorXi &topology);
    /** Holds the layers of the neural net. */
    std::vector<nn_layer> layers_;
    /** Training parameters. */
    train_param tparams_;
    /** Holds the error gradient, Jacobian, ... */
    matrix_t j_, jj_;
    vector_t je_;
    /** Number of adjustable parameters. */
    unsigned int nparam_;
    /** Scaling parameters. */
    vector_t x_shift_;
    vector_t x_scale_;
    vector_t y_shift_;
    vector_t y_scale_;

public:
    /** Initialize neural net with the given topology. */
    neural_net(const Eigen::VectorXi& topology);
    /** Read neural net from file. */
    neural_net(const char* filename);
    /** Initialize weights. */
    void init_weights();
    /** Propagate data through the net.
     *  Rows of X are instances, columns are features. */
    void forward_pass(const matrix_t& X);
    /** Compute NN loss w.r.t. input and output data.
     *  Also backpropagates the error. */
    f_type loss(const matrix_t& X, const matrix_t& Y);
    /** Train the net on inputs X and targets Y. */
    void train(const matrix_t& X, const matrix_t& Y, bool verbose = false);
    /** Get training parameters. */
    train_param get_train_params() const;
    /** Set training parameters. */
    void set_train_params(const train_param& params);
    /** Return activation of output layer. */
    matrix_t get_activation();
    /** Get gradient of output(s) w.r.t. input feature `index`. */
    matrix_t get_gradient(int index);
    /** Returns the logistic function values f(x) = 1 / (1 + exp(-x)) given x. */
    static matrix_t activation(const matrix_t& x);
    /** Returns the gradient f'(x) = f(x) * (1 - f(x)) of the logistic function given f(x). */
    static matrix_t activation_gradient(const matrix_t& x);
    /** Set weights and biases. */
    void set_wb(const vector_t& wb);
    /** Get weights and biases. */
    vector_t get_wb() const;
    /** Compute autoscale parameters. */
    void autoscale(const matrix_t& X, const matrix_t& Y);
    /** Reset autoscale parameters. */
    void autoscale_reset();
    /** Write net parameters to file. */
    bool write(const char* filename);
    /** Destructor. */
    ~neural_net();
};
}
#endif
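
A minimal usage sketch of this interface, assuming a matching implementation unit is linked in. The topology, toy data, file name "net.dat", and the order of the autoscale/train calls below are illustrative assumptions, not taken from the repository:

#include <iostream>
#include <Eigen/Core>
#include "nn.h"

int main()
{
    // Hypothetical example: a 1-4-1 network fit to y = x^2 on [0, 1].
    Eigen::VectorXi topology(3);
    topology << 1, 4, 1;
    nnet::neural_net net(topology);
    net.init_weights();

    // Toy training data: rows are instances, columns are features.
    nnet::matrix_t X(20, 1), Y(20, 1);
    for (int i = 0; i < 20; ++i) {
        X(i, 0) = i / 19.0;
        Y(i, 0) = X(i, 0) * X(i, 0);
    }

    // Adjust a stopping criterion before training (field meaning assumed from the header).
    nnet::train_param p = net.get_train_params();
    p.max_iter = 500;
    net.set_train_params(p);

    net.autoscale(X, Y);   // compute input/output scaling
    net.train(X, Y, true); // verbose training

    net.forward_pass(X);   // predict on the training inputs
    std::cout << net.get_activation() << std::endl;

    net.write("net.dat");  // persist the trained parameters
    return 0;
}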
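
The header only declares the two static activation helpers; one plausible coefficient-wise implementation consistent with their doc comments (an assumption about the implementation file, not the repository's actual code) would be:

#include "nn.h"

namespace nnet
{
// Logistic sigmoid applied coefficient-wise: f(x) = 1 / (1 + exp(-x)).
matrix_t neural_net::activation(const matrix_t& x)
{
    return (1.0 + (-x.array()).exp()).inverse().matrix();
}

// Derivative of the logistic, expressed in terms of f(x): f'(x) = f(x) * (1 - f(x)) = f(x) - f(x)^2.
matrix_t neural_net::activation_gradient(const matrix_t& x)
{
    return (x.array() - x.array().square()).matrix();
}
}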