MachineIntelligenceCore:NeuralNets
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Friends Macros
ReLU.hpp
Go to the documentation of this file.
1 
23 #ifndef SRC_MLNN_RELU_HPP_
24 #define SRC_MLNN_RELU_HPP_
25 
26 #include <mlnn/layer/Layer.hpp>
27 
28 namespace mic {
29 namespace mlnn {
30 namespace activation_function {
31 
36 template <typename eT=float>
37 class ReLU : public mic::mlnn::Layer<eT> {
38 public:
39 
45  ReLU(size_t size_, std::string name_ = "ReLU") :
46  ReLU(size_, 1, 1, name_)
47  {
48  std::cout<<"constructor ReLU 1!\n";
49  }
50 
51 
59  ReLU(size_t height_, size_t width_, size_t depth_, std::string name_ = "ReLU") :
60  Layer<eT>::Layer(height_, width_, depth_,
61  height_, width_, depth_,
62  LayerTypes::ReLU, name_)
63  {
64 
65  }
66 
70  virtual ~ReLU() {};
71 
72  void forward(bool apply_dropout = false) {
73  // Access the data of both matrices.
74  eT* x = s['x']->data();
75  eT* y = s['y']->data();
76 
77  // Iterate through elements.
78  size_t size = s['x']->rows() * s['x']->cols();
79  for (size_t i = 0; i < size; i++) {
80  y[i] = fmax(x[i], 0.0f); //: floats - fmax
81  }//: for
82 
83 /* std::cout << "ReLU forward: s['x'] = \n" << (*s['x']) << std::endl;
84  std::cout << "ReLU forward: s['y'] = \n" << (*s['y']) << std::endl;*/
85  }
86 
87  void backward() {
88  // Access the data of matrices.
89  eT* gx = g['x']->data();
90  eT* gy = g['y']->data();
91  eT* y = s['y']->data();
92 
93  // Iterate through elements.
94  size_t size = g['x']->rows() * g['x']->cols();
95  for (size_t i = 0; i < size; i++) {
96  // Calculate the ReLU "derivative".
97  eT dy = (eT)(y[i] > 0.0);
98  // Pass the gradient.
99  gx[i] = dy * gy[i];
100 
101  }//: for
102 
103 /* std::cout << "ReLU backward: g['y'] = \n" << (*g['y']) << std::endl;
104  std::cout << "ReLU backward: g['x'] = \n" << (*g['x']) << std::endl;*/
105  }
106 
112  virtual void update(eT alpha_, eT decay_ = 0.0f) { };
113 
114 protected:
115  // Unhiding the template inherited fields via "using" statement.
116  using Layer<eT>::g;
117  using Layer<eT>::s;
118 
119 private:
120  // Friend class - required for using boost serialization.
121  template<typename tmp> friend class mic::mlnn::MultiLayerNeuralNetwork;
122 
126  ReLU<eT>() : Layer<eT> () { std::cout << "Private ReLU\n";}
127 
128 };
129 
130 } /* namespace activation_function */
131 } /* namespace mlnn */
132 } /* namespace mic */
133 
134 #endif /* SRC_MLNN_RELU_HPP_ */
virtual void update(eT alpha_, eT decay_=0.0f)
Definition: ReLU.hpp:112
ReLU(size_t height_, size_t width_, size_t depth_, std::string name_="ReLU")
Definition: ReLU.hpp:59
void forward(bool apply_dropout=false)
Definition: ReLU.hpp:72
ReLU(size_t size_, std::string name_="ReLU")
Definition: ReLU.hpp:45
Class representing a multi-layer neural network.
Definition: Layer.hpp:86
LayerTypes
Enumeration of possible layer types.
Definition: Layer.hpp:58
mic::types::MatrixArray< eT > s
States - contains input [x] and output [y] matrices.
Definition: Layer.hpp:753
mic::types::MatrixArray< eT > g
Gradients - contains input [x] and output [y] matrices.
Definition: Layer.hpp:756
Contains a template class representing a layer.