MachineIntelligenceCore:NeuralNets
AdaDeltaTests.cpp
#include <gtest/gtest.h>
#include <cmath>

// Redefine word "public" so every class field/method will be accessible for tests.
#define private public

// Assumed includes - the original listing collapsed these lines: the AdaDelta
// optimizer header and the header defining the artificial-landscape test fixtures.
#include <optimization/AdaDelta.hpp>
#include "ArtificialLandscapesTests.hpp"


////////////////////////////////////////////////////////////
// AdaDelta
////////////////////////////////////////////////////////////
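
/*!
 * Background (a sketch of the standard formulation, not necessarily the exact
 * library implementation): AdaDelta performs adaptive gradient descent using
 * running averages of the squared gradients E[g^2] and squared updates E[d^2]:
 *
 *   E[g^2]_t = rho * E[g^2]_{t-1} + (1 - rho) * g_t^2
 *   d_t      = - (sqrt(E[d^2]_{t-1} + eps) / sqrt(E[g^2]_t + eps)) * g_t
 *   E[d^2]_t = rho * E[d^2]_{t-1} + (1 - rho) * d_t^2
 *   x_{t+1}  = x_t + d_t
 *
 * The optimizer under test exposes (from AdaDelta.hpp):
 *   virtual void update(mic::types::MatrixPtr<eT> p_, mic::types::MatrixPtr<eT> dp_,
 *                       eT learning_rate_, eT decay_ = 0.0);
 */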
/*!
 * Tests AdaDelta convergence on a 1D sphere function landscape (square function).
 */
TEST_F(Sphere1DLandscape, AdaDelta_Convergence) {
	// Optimizer - 1D AdaDelta with default values.
	mic::neural_nets::optimization::AdaDelta<double> opt(x->rows(), x->cols());

	// Simulate a simple gradient descent.
	size_t iteration = 0;
	double abs_diff = 1.0;
	while (abs_diff > eps) {
		mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
		// Update with the default learning rate (0.001).
		opt.update(x, dx, 0.001);

		// Check whether the value of the function is finite.
		double value = fun.calculateValue(x);
		ASSERT_TRUE(std::isfinite(value)) << " at iteration i=" << iteration;

		// Calculate the absolute difference from the function minimum (std::fabs, not std::abs).
		abs_diff = std::fabs(value - fun.minValue());
		iteration++;
	}//: while
	ASSERT_GE(eps, std::fabs(fun.calculateValue(x) - fun.minValue()));
	std::cout << " -> Converged after " << iteration << " iterations\n";
}


/*!
 * Tests AdaDelta convergence on a 20D sphere function landscape (square function).
 */
TEST_F(Sphere20DLandscape, AdaDelta_Convergence) {
	// Optimizer - 20D AdaDelta (with default values).
	mic::neural_nets::optimization::AdaDelta<double> opt(x->rows(), x->cols());

	// Simulate a simple gradient descent.
	size_t iteration = 0;
	double abs_diff = 1.0;
	while (abs_diff > eps) {
		mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
		// Update with the default learning rate (0.001).
		opt.update(x, dx, 0.001);

		// Check whether the value of the function is finite.
		double value = fun.calculateValue(x);
		ASSERT_TRUE(std::isfinite(value)) << " at iteration i=" << iteration;

		// Calculate the absolute difference from the function minimum (std::fabs, not std::abs).
		abs_diff = std::fabs(value - fun.minValue());
		iteration++;
	}//: while
	ASSERT_GE(eps, std::fabs(fun.calculateValue(x) - fun.minValue()));
	std::cout << " -> Converged after " << iteration << " iterations\n";
}


/*!
 * Tests AdaDelta convergence on Beale's function landscape (2D). The DISABLED_
 * prefix excludes this test from the default run.
 */
TEST_F(Beale2DLandscape, DISABLED_AdaDelta_Convergence) {
	// Optimizer - 2D AdaDelta (with default values).
	mic::neural_nets::optimization::AdaDelta<double> opt(x->rows(), x->cols());

	// Simulate a simple gradient descent.
	size_t iteration = 0;
	double abs_diff = 1.0;
	while (abs_diff > eps) {
		mic::types::MatrixPtr<double> dx = fun.calculateGradient(x);
		// Update with the default learning rate (0.001).
		opt.update(x, dx, 0.001);

		// Check whether the value of the function is finite.
		double value = fun.calculateValue(x);
		ASSERT_TRUE(std::isfinite(value)) << " at iteration i=" << iteration;

		// Calculate the absolute difference from the function minimum (std::fabs, not std::abs).
		abs_diff = std::fabs(value - fun.minValue());
		iteration++;
	}//: while
	ASSERT_GE(eps, std::fabs(fun.calculateValue(x) - fun.minValue()));
	std::cout << " -> Converged after " << iteration << " iterations\n";
}
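
// Note: disabled tests can still be run explicitly with standard GoogleTest
// flags, e.g. (the binary name is illustrative):
//   ./adadelta_tests --gtest_also_run_disabled_tests --gtest_filter='Beale2DLandscape.*'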
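// For reference, a minimal self-contained sketch of a single AdaDelta step,
// following the standard formulation sketched above. The names (ScalarAdaDelta,
// rho, eps) are illustrative assumptions, not part of the tested library.
namespace {

struct ScalarAdaDelta {
	double Eg2 = 0.0;   ///< Running average of squared gradients, E[g^2].
	double Ed2 = 0.0;   ///< Running average of squared updates, E[d^2].
	double rho = 0.9;   ///< Decay rate of both running averages.
	double eps = 1e-6;  ///< Numerical-stability constant.

	/// Performs one AdaDelta step in place, returning the applied delta.
	double step(double & x, double g) {
		Eg2 = rho * Eg2 + (1.0 - rho) * g * g;
		double d = -std::sqrt(Ed2 + eps) / std::sqrt(Eg2 + eps) * g;
		Ed2 = rho * Ed2 + (1.0 - rho) * d * d;
		x += d;
		return d;
	}
};

}//: namespace

/*!
 * Sanity check of the sketch on the 1D sphere function f(x) = x^2 (gradient 2x):
 * after enough steps the function value should have decreased.
 */
TEST(ScalarAdaDeltaSketch, DecreasesSphereValue) {
	ScalarAdaDelta opt;
	double x = 1.0;
	for (size_t i = 0; i < 1000; i++)
		opt.step(x, 2.0 * x);
	ASSERT_LT(x * x, 1.0);
}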