MachineIntelligenceCore:NeuralNets
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Friends Macros
SoftmaxTests.cpp
Go to the documentation of this file.
1 
25 #include "SoftmaxTests.hpp"
26 
27 
28 
33  double eps = 1e-5;
34 
35  // Forward pass.
36  mic::types::MatrixPtr<float> y = layer.forward(input_x);
37 
38  for (size_t i=0; i<4; i++)
39  ASSERT_LE( fabs((*y)[i] - (*output_y)[i]), eps) << "Difference at position i=" << i << " where " << (*y)[i] << " and should be " << (*output_y)[i];
40 }
41 
42 
43 
47 /*TEST_F(Softmax4x1Float, NumericalGradientCheck_dx) {
48 
49  // Calculate gradients.
50  mic::types::MatrixPtr<double> predicted_y = layer.forward(input_x);
51  mic::types::MatrixPtr<double> dy = loss.calculateGradient(target_y, predicted_y);
52  layer.backward(dy);
53  // Store resulting gradients - make a copy!
54  mic::types::MatrixPtr<double> dx = MAKE_MATRIX_PTR(double, *layer.g["x"]);
55 
56  // Calculate numerical gradients.
57  double delta = 1e-5;
58  mic::types::MatrixPtr<double> nx = layer.calculateNumericalGradient<mic::neural_nets::loss::SquaredErrorLoss<double> >(const_x, target_y, --, loss, delta);
59 
60  // Compare gradients.
61  double eps = 1e-8;
62  for (size_t i=0; i<(size_t)dW->size(); i++){
63 // std::cout << "i=" << i << " (*dW)[i]= " << (*dW)[i] << " (*nW)[i]= " << (*nW)[i] << std::endl;
64  EXPECT_LE( fabs((*dW)[i] - (*nW)[i]), eps) << "Too big difference between dW and numerical dW at position i=" << i;
65  }//: for
66 }*/
67 
68 
69 int main(int argc, char **argv) {
70  testing::InitGoogleTest(&argc, argv);
71  return RUN_ALL_TESTS();
72 }
Test Fixture - 4x1 softmax layer.
int main(int argc, char **argv)
TEST_F(Softmax4x1Float, Forward)