#ifndef HEBBIANNEURALNETWORK_HPP_
#define HEBBIANNEURALNETWORK_HPP_

// ...

template <typename eT>
// ...
void forward(mic::types::MatrixPtr<eT> input_data, bool skip_dropout = false) {
	// The network must contain at least one layer.
	assert(layers.size() != 0);

	LOG(LDEBUG) << "Inputs size: " << input_data->rows() << "x" << input_data->cols();
	LOG(LDEBUG) << "First layer input matrix size: "
			<< layers[0]->s['x']->rows() << "x" << layers[0]->s['x']->cols();

	// The input batch must match the dimensionality of the first layer's input.
	assert((layers[0]->s['x'])->rows() == input_data->rows());

	// Each layer's output must match the input of the layer that follows it.
	for (size_t i = 0; i < layers.size() - 1; i++) {
		assert(layers[i + 1]->s['x']->rows() == layers[i]->s['y']->rows());
	}

	// Copy the input batch into the input ('x') of the first layer.
	(*(layers[0]->s['x'])) = (*input_data);

	// Propagate the activations layer by layer.
	for (size_t i = 0; i < layers.size(); i++) {
		LOG(LDEBUG) << "Layer [" << i << "] " << layers[i]->name() << ": ("
				<< layers[i]->inputSize() << "x" << layers[i]->batchSize() << ") -> ("
				<< layers[i]->outputSize() << "x" << layers[i]->batchSize() << ")";
		layers[i]->forward(skip_dropout);
	}
}
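The asserts above encode a simple shape contract: the input batch feeds s['x'] of layer 0, and every layer's s['y'] must line up with the next layer's s['x']. A minimal, self-contained sketch of that contract, using hypothetical layer sizes and plain dimension pairs in place of the library's MatrixPtr type:

#include <cassert>
#include <cstddef>
#include <vector>

// Stand-in for one layer's dimensions: rows of s['x'] and s['y'].
struct LayerShape {
	std::size_t in_rows;
	std::size_t out_rows;
};

int main() {
	// Hypothetical three-layer network: 784 -> 256 -> 64 -> 10.
	std::vector<LayerShape> layers = {{784, 256}, {256, 64}, {64, 10}};
	std::size_t input_rows = 784;

	// The input must match the first layer's input...
	assert(layers[0].in_rows == input_rows);
	// ...and each output must match the following layer's input.
	for (std::size_t i = 0; i + 1 < layers.size(); i++)
		assert(layers[i + 1].in_rows == layers[i].out_rows);
	return 0;
}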
eT train(mic::types::MatrixPtr<eT> encoded_batch_, eT learning_rate_) {
	// ...
}
eT test(mic::types::MatrixPtr<eT> encoded_batch_) {
	// In test mode dropout must be skipped.
	bool skip_dropout = true;

	// Forward the batch through the network.
	forward(encoded_batch_, skip_dropout);
	// ...
}
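Together, train() and test() suggest the usual workflow: construct the network, add layers, call train() per batch, and call test() for evaluation. A hypothetical usage sketch; the include path, the namespace, the shared_ptr-based MatrixPtr construction, and the batch dimensions are all assumptions, since neither the include layout nor the layer-adding API appears in this fragment:

#include <memory>
#include <mlnn/HebbianNeuralNetwork.hpp>  // assumed include path

int main() {
	mic::mlnn::HebbianNeuralNetwork<float> net("hebbian_net");
	// ... add layers here; the layer-construction API is not shown above.

	// Assuming MatrixPtr<eT> is a shared_ptr to a matrix type with a
	// (rows, cols) constructor.
	auto batch = std::make_shared<mic::types::Matrix<float>>(784, 100);

	for (size_t epoch = 0; epoch < 10; epoch++)
		net.train(batch, 0.01f);       // forward pass + learning step

	float loss = net.test(batch);      // forward pass with dropout skipped
	return 0;
}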
HebbianNeuralNetwork(std::string name_="hebbian_net")
virtual ~HebbianNeuralNetwork()
	Virtual destructor - empty.
bool connected
	Flag denoting whether the layers are interconnected, thus no copying between inputs and outputs of the adjacent layers is required.
std::vector< std::shared_ptr< mic::mlnn::Layer< eT > > > layers
eT test(mic::types::MatrixPtr< eT > encoded_batch_)
void update(eT alpha_, eT decay_=0.0f)
void forward(mic::types::MatrixPtr< eT > input_data, bool skip_dropout=false)
eT train(mic::types::MatrixPtr< eT > encoded_batch_, eT learning_rate_)
void resizeBatch(size_t batch_size_)

Class representing a multi-layer neural network.
Class representing a multi-layer neural network based on Hebbian learning.
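The member summary includes update(eT alpha_, eT decay_=0.0f), which presumably applies the Hebbian learning step after a forward pass. As a reference point, the textbook Hebbian rule with weight decay looks like the sketch below; this illustrates the general rule, not necessarily this library's exact implementation, and the Eigen types are an assumption:

#include <Eigen/Dense>

// Textbook Hebbian update with decay: strengthen weights between co-active
// units (W += alpha * y * x^T, averaged over the batch), then shrink all
// weights by the decay factor.
void hebbian_update(Eigen::MatrixXf& W,          // out_rows x in_rows
                    const Eigen::MatrixXf& x,    // inputs:  in_rows  x batch
                    const Eigen::MatrixXf& y,    // outputs: out_rows x batch
                    float alpha, float decay) {
	const float batch = static_cast<float>(x.cols());
	W += (alpha / batch) * (y * x.transpose());
	W *= (1.0f - decay);
}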