MachineIntelligenceCore:NeuralNets
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Friends Macros
AdaGradTests.cpp
Go to the documentation of this file.
1 
25 #include <gtest/gtest.h>
26 #include <cmath>
27 
29 
30 // Redefine word "private" so every class field/method will be accessible for tests.
31 #define private public
32 #include <optimization/AdaGrad.hpp>
33 
34 
36 // AdaGrad
38 
39 
44 TEST_F(Sphere1DLandscape, DISABLED_AdaGrad_Convergence) {
45  // Optimization function - 1d AdaGrad with learning rate = 0.1 (and default 0.9 momentum).
46  mic::neural_nets::optimization::AdaGrad<double> opt(x->rows(), x->cols());
47 
48  // Simulate a simple gradient descent.
49  size_t iteration = 0;
50  double abs_diff = 1.0;
51  while (abs_diff > eps) {
52  mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
53  // Update with learning rate = 0.1.
54  opt.update(x, dx, 0.1);
55 
56  // Check whether value of the function is finite.
57  double value = fun.calculateValue(x);
58  ASSERT_EQ(true, std::isfinite(value)) << " at iteration i=" << iteration;
59 
60  // Calculate diff - std:abs!!
61  abs_diff = std::abs(value - fun.minValue());
62  iteration++;
63  }//: while
64  ASSERT_GE(eps, std::abs(fun.calculateValue(x) - fun.minValue()));
65  std::cout << " -> Converged after " << iteration << " iterations\n";
66 }
67 
68 
73 TEST_F(Sphere20DLandscape, DISABLED_AdaGrad_Convergence) {
74  // Optimization function - 20d AdaGrad.
75  mic::neural_nets::optimization::AdaGrad<double> opt(x->rows(), x->cols());
76 
77  // Simulate a simple gradient descent.
78  size_t iteration = 0;
79  double abs_diff = 1.0;
80  while (abs_diff > eps) {
81  mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
82  // Update with default learning rate.
83  opt.update(x, dx, 0.001);
84 
85  // Check whether value of the function is finite.
86  double value = fun.calculateValue(x);
87  ASSERT_EQ(true, std::isfinite(value)) << " at iteration i=" << iteration;
88 
89  // Calculate diff - std:abs!!
90  abs_diff = std::abs(value - fun.minValue());
91  iteration++;
92  }//: while
93  ASSERT_GE(eps, std::abs(fun.calculateValue(x) - fun.minValue()));
94  std::cout << " -> Converged after " << iteration << " iterations\n";
95 }
96 
97 
Test fixture - artificial landscape - sphere function 20D (square function).
TEST_F(Sphere1DLandscape, DISABLED_AdaGrad_Convergence)
Test fixture - artificial landscape - sphere function 1D (square function).
virtual void update(mic::types::MatrixPtr< eT > p_, mic::types::MatrixPtr< eT > dp_, eT learning_rate_, eT decay_=0.0)
Update using AdaGrad - adaptive gradient descent.
Definition: AdaGrad.hpp:39