/*!
 * \file MultiLayerNeuralNetworkTests.hpp
 * \brief Unit-test fixtures for multi-layer neural networks (MachineIntelligenceCore:NeuralNets).
 */
#ifndef MULTILAYERNEURALNETSTESTS_HPP_
#define MULTILAYERNEURALNETSTESTS_HPP_


#include <gtest/gtest.h>

// Redefine "private" and "protected" so every class field/method will be accessible for tests.
#define private public
#define protected public


namespace mic { namespace neural_nets { namespace unit_tests {

44 class Simple2LayerRegressionNN : public ::testing::Test {
45 public:
46  // Constructor. Sets layer size.
48  nn("simple_linear_network")
49  {
50  // Add
51  nn.pushLayer(new mic::mlnn::fully_connected::Linear<double>(10, 20, "First Linear"));
53  nn.pushLayer(new mic::mlnn::fully_connected::Linear<double>(20, 4, "Second Linear"));
56  }
57 
58 protected:
59  virtual void SetUp() {
60  // Reset the gradients.
61  nn.resetGrads();
62  // Set batch size to 1.
63  nn.resizeBatch(1);
64  }
65 
66 private:
67  // Neural network.
69 };


79 class Tutorial2LayerNN : public ::testing::Test {
80 public:
81  // Constructor. Sets layer size.
83  nn("simple_linear_network")
84  {
85  // Add
91 
92  input_x = MAKE_MATRIX_PTR(double, 2, 1);
93  target_y = MAKE_MATRIX_PTR(double, 2, 1);
94  ffpass1_lin1_y = MAKE_MATRIX_PTR(double, 2, 1);
95  ffpass1_sig1_y = MAKE_MATRIX_PTR(double, 2, 1);
96  ffpass1_lin2_y = MAKE_MATRIX_PTR(double, 2, 1);
97  ffpass1_sig2_y = MAKE_MATRIX_PTR(double, 2, 1);
98  ffpass1_dy = MAKE_MATRIX_PTR(double, 2, 1);
99  bwpass1_lin2_dW = MAKE_MATRIX_PTR(double, 2, 2);
100  bwpass1_lin2_pW_updated = MAKE_MATRIX_PTR(double, 2, 2);
101  bwpass1_lin1_dW = MAKE_MATRIX_PTR(double, 2, 2);
102  bwpass1_lin1_pW_updated = MAKE_MATRIX_PTR(double, 2, 2);
103 }
104 
105 protected:
106  virtual void SetUp() {
107  // Reset the gradients.
108  nn.resetGrads();
109  // Set batch size to 1.
110  nn.resizeBatch(1);
111 
112  // Set weights and biases.
113  (*nn.layers[0]->p["W"]) << 0.15, .20, .25, .30;
114  (*nn.layers[0]->p["b"]) << .35, 0.35;
115 
116  (*nn.layers[2]->p["W"]) << 0.4, .45, .5, .55;
117  (*nn.layers[2]->p["b"]) << .6, 0.6;
118 
119  (*input_x) << 0.05, 0.1;
120  (*target_y) << 0.01, 0.99;
121 
122  // Values after the first forward pass.
123  // Linear1
124  (*ffpass1_lin1_y) << 0.3775, 0.3925;
125  // Sigmoid1
126  (*ffpass1_sig1_y) << 0.59327, 0.596884;
127  // Linear2
128  (*ffpass1_lin2_y) << 1.10591, 1.22492;
129  // Sigmoid2
130  (*ffpass1_sig2_y) << 0.751365, 0.772928;
131 
132  // Loss and gradient.
133  ffpass1_loss = 0.298371;
134  (*ffpass1_dy) << 0.741365, -0.217072;
135 
136  // backward pass - weights gradients.
137  (*bwpass1_lin2_dW) << 0.082167, 0.0826676, -0.0226025, -0.0227402;
138  (*bwpass1_lin1_dW) << 0.000438568, 0.000877135, 0.000497713, 0.000995425;
139 
140  // Weights after update with gradient descent learning rule with learning rate = 0.5 (in this case: delta rule).
141  (*bwpass1_lin2_pW_updated) << 0.358916, 0.408666, 0.511301, 0.56137;
142  (*bwpass1_lin1_pW_updated) << 0.149781, 0.199561, 0.249751, 0.299502;
143  }
144 
145 
146 private:
147  // Neural network.
149 
150  // Test input x - used in forward pass.
151  mic::types::MatrixPtr<double> input_x;
152 
153  // Target y values.
154  mic::types::MatrixPtr<double> target_y;
155 
156  // Results of the first forward pass.
157  mic::types::MatrixPtr<double> ffpass1_lin1_y, ffpass1_sig1_y, ffpass1_lin2_y, ffpass1_sig2_y;
158 
159  // Loss and gradient.
160  double ffpass1_loss;
161  mic::types::MatrixPtr<double> ffpass1_dy;
162 
163  // Backward pass.
164  mic::types::MatrixPtr<double> bwpass1_lin2_dW;
165  mic::types::MatrixPtr<double> bwpass1_lin2_pW_updated;
166  mic::types::MatrixPtr<double> bwpass1_lin1_dW;
167  mic::types::MatrixPtr<double> bwpass1_lin1_pW_updated;
168 };

} } }//: namespaces

#endif /* MULTILAYERNEURALNETSTESTS_HPP_ */
// Extraction residue (Doxygen tooltips for the declarations dropped above):
//   std::vector< std::shared_ptr< mic::mlnn::Layer< eT > > > layers
//   mic::mlnn::BackpropagationNeuralNetwork< double > nn   (both fixtures)
//   "Test Fixture - feed-forward net with 2 layers. A 'formalized' example from a step-by-step tutorial."