MachineIntelligenceCore:NeuralNets
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Friends Macros
GradientDescentTests.cpp
Go to the documentation of this file.
1 
25 #include <gtest/gtest.h>
26 #include <cmath>
27 
29 
30 // Redefine word "private" so every class field/method will be accessible for tests.
31 #define private public
33 
34 
35 
37 // (S)GD
39 
40 
45 TEST_F(Sphere1DLandscape, GradientDescent_Convergence) {
46  // Optimizer - plain (S)GD. NOTE(review): the optimizer declaration (source line 47) is elided from this listing - confirm in the original file. fun, x and eps come from the Sphere1DLandscape fixture.
48 
49  // Simulate a simple gradient descent: step until the function value is within eps of the known minimum.
50  size_t iteration = 0;
51  double abs_diff = 1.0;
52  while (abs_diff > eps) {
53  mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
54  // Update with learning rate = 0.1.
55  opt.update(x, dx, 0.1);
56 
57  // Check whether value of the function is finite (guards against divergence/NaN).
58  double value = fun.calculateValue(x);
59  ASSERT_EQ(true, std::isfinite(value)) << " at iteration i=" << iteration;
60 
61  // Absolute difference from the known minimum - note std::abs (double overload), not the integer abs.
62  abs_diff = std::abs(value - fun.minValue());
63  iteration++;
64  }//: while
65  ASSERT_GE(eps, std::abs(fun.calculateValue(x) - fun.minValue()));
66  std::cout << " -> Converged after " << iteration << " iterations\n";
67 }
68 
69 
74 TEST_F(Sphere20DLandscape, GradientDescent_Convergence) {
75  // Optimizer - plain (S)GD with learning rate = 0.1. NOTE(review): the optimizer declaration (source line 76) is elided from this listing - confirm in the original file. fun, x and eps come from the Sphere20DLandscape fixture.
77 
78  // Simulate a simple gradient descent: step until the function value is within eps of the known minimum.
79  size_t iteration = 0;
80  double abs_diff = 1.0;
81  while (abs_diff > eps) {
82  mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
83  // Update with learning rate = 0.1.
84  opt.update(x, dx, 0.1);
85 
86  // Check whether value of the function is finite (guards against divergence/NaN).
87  double value = fun.calculateValue(x);
88  ASSERT_EQ(true, std::isfinite(value)) << " at iteration i=" << iteration;
89 
90  // Absolute difference from the known minimum - note std::abs (double overload), not the integer abs.
91  abs_diff = std::abs(value - fun.minValue());
92  iteration++;
93  }//: while
94  ASSERT_GE(eps, std::abs(fun.calculateValue(x) - fun.minValue()));
95  std::cout << " -> Converged after " << iteration << " iterations\n";
96 }
97 
98 
103 TEST_F(Beale2DLandscape, GradientDescent_Convergence) {
104  // Optimizer - plain (S)GD with learning rate = 0.01 (the body below uses 0.01, not 0.1 as the original comment claimed). NOTE(review): the optimizer declaration (source line 105) is elided from this listing - confirm in the original file.
106 
107  // Simulate a simple gradient descent: step until the function value is within eps of the known minimum.
108  size_t iteration = 0;
109  double abs_diff = 1.0;
110  while (abs_diff > eps) {
111  mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
112  // Update with learning rate = 0.01.
113  opt.update(x, dx, 0.01);
114 
115  // Check whether value of the function is finite (guards against divergence/NaN).
116  double value = fun.calculateValue(x);
117  ASSERT_EQ(true, std::isfinite(value)) << " at iteration i=" << iteration;
118 
119  // Absolute difference from the known minimum - note std::abs (double overload), not the integer abs.
120  abs_diff = std::abs(value - fun.minValue());
121  iteration++;
122  }//: while
123  ASSERT_GE(eps, std::abs(fun.calculateValue(x) - fun.minValue()));
124  std::cout << " -> Converged after " << iteration << " iterations\n";
125 }
126 
127 
132 TEST_F(Rosenbrock2DLandscape, GradientDescent_Convergence) {
133  // Optimizer - plain (S)GD.
134  // Such a simple gradient descent has a HUGE problem with instability for bigger ni (learning rate)!
135  // E.g. ni = 0.001 - the gradient explodes after 5 iterations!!
136  double ni = 0.00001;
138 
139  // Simulate a simple gradient descent. NOTE(review): the optimizer declaration (source line 137) is elided from this listing - confirm in the original file.
140  size_t iteration = 0;
141  double abs_diff = 1.0;
142  while (abs_diff > eps) {
143  mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
144  // Update with learning rate = ni.
145  opt.update(x, dx, ni);
146 
147 /* std::cout << "i=" << iteration << std::endl;
148  std::cout << "(*dx)[0]=" << (*dx)[0] << " (*dx)[1]=" << (*dx)[1] << std::endl;
149  std::cout << "(*x)[0]=" << (*x)[0] << " (*x)[1]=" << (*x)[1] << std::endl;
150  std::cout << " min value = " << fun.minValue() << std::endl;*/
151 
152  // Check whether value of the function is finite (guards against divergence/NaN).
153  double value = fun.calculateValue(x);
154 // std::cout << " value = " << value << std::endl;
155  ASSERT_EQ(true, std::isfinite(value)) << " at iteration i=" << iteration << " for ni =" << ni;
156 
157  // Absolute difference from the known minimum - note std::abs (double overload), not the integer abs.
158  abs_diff = std::abs(value - fun.minValue());
159  iteration++;
160 // if (iteration % 1000 == 1)
161 // std::cout << "iteration =" << iteration << " abs value diff = " << abs_diff << std::endl;
162  }//: while
163  ASSERT_GE(eps, std::abs(fun.calculateValue(x) - fun.minValue()));
164  std::cout << " -> Converged after " << iteration << " iterations\n";
165 }
Test fixture - artificial landscape - sphere function 20D (square function).
TEST_F(Sphere1DLandscape, GradientDescent_Convergence)
Test fixture - artificial landscape - Rosenbrock function 2D.
Test fixture - artificial landscape - sphere function 1D (square function).
Test fixture - artificial landscape - Beale's function 2D.
virtual void update(mic::types::MatrixPtr< eT > p_, mic::types::MatrixPtr< eT > dp_, eT learning_rate_, eT decay_=0.0)
Update in the direction of gradient descent.