MachineIntelligenceCore:NeuralNets
 All Classes Namespaces Files Functions Variables Enumerations Enumerator Friends Macros
mnist_hebbian_features_visualization_test.cpp
Go to the documentation of this file.
1 
26 #include <boost/thread/thread.hpp>
27 #include <boost/bind.hpp>
28 
29 #include <importers/MNISTMatrixImporter.hpp>
30 
31 #include <logger/Log.hpp>
32 #include <logger/ConsoleOutput.hpp>
33 using namespace mic::logger;
34 
35 #include <application/ApplicationState.hpp>
36 
37 #include <configuration/ParameterServer.hpp>
38 
39 #include <opengl/visualization/WindowManager.hpp>
40 #include <opengl/visualization/WindowGrayscaleBatch.hpp>
41 using namespace mic::opengl::visualization;
42 
43 // Hebbian neural net.
45 using namespace mic::mlnn;
46 
47 // Encoders.
48 #include <encoders/MatrixXfMatrixXfEncoder.hpp>
49 #include <encoders/UIntMatrixXfEncoder.hpp>
50 
/// Window displaying the raw input MNIST batch.
WindowGrayscaleBatch<float>* w_input;
/// Window displaying the network's reconstruction of the input batch.
WindowGrayscaleBatch<float>* w_reconstruction;
/// Window displaying the first-layer weights (permanences).
WindowGrayscaleBatch<float>* w_weights1;
/// Window for connectivity visualization — declared but never created (creation is commented out in main).
WindowGrayscaleBatch<float>* w_weights2;

/// MNIST importer supplying random mini-batches of images.
mic::importers::MNISTMatrixImporter<float>* importer;

/// Encoder transforming MNIST image matrices into network input matrices.
mic::encoders::MatrixXfMatrixXfEncoder* mnist_encoder;
/// Label (one-hot) encoder — unused in this unsupervised test.
//mic::encoders::UIntMatrixXfEncoder* label_encoder;

/// Side length of a (square) MNIST image patch, in pixels.
const size_t patch_size = 28;
/// Number of images per mini-batch.
const size_t batch_size = 4;
/// Number of output units of the BinaryCorrelator layer.
const size_t output_units = 12;
71 
76 void batch_function (void) {
77 
78 /* if (neural_net.load(fileName)) {
79  LOG(LINFO) << "Loaded neural network from a file";
80  } else {*/
81  {
82  // Create a simple hebbian network.
83  neural_net.pushLayer(new BinaryCorrelator<float>(patch_size*patch_size, output_units, 0.6, 28*28*0.01));
85 
86  LOG(LINFO) << "Generated new neural network";
87  }//: else
88 
89  size_t iteration = 0;
90 
91  // Main application loop.
92  while (!APP_STATE->Quit()) {
93 
94  // If not paused.
95  if (!APP_STATE->isPaused()) {
96 
97  // If single step mode - pause after the step.
98  if (APP_STATE->isSingleStepModeOn())
99  APP_STATE->pressPause();
100 
101  { // Enter critical section - with the use of scoped lock from AppState!
102  APP_DATA_SYNCHRONIZATION_SCOPED_LOCK();
103 
104  // Retrieve the next minibatch.
105  mic::types::MNISTBatch<float> bt = importer->getRandomBatch();
106 
107  // Set batch to be displayed.
108  w_input->setBatchUnsynchronized(bt.data());
109 
110  // Encode data.
111  mic::types::MatrixXfPtr encoded_batch = mnist_encoder->encodeBatch(bt.data());
112  mic::types::MatrixXfPtr encoded_labels = mnist_encoder->encodeBatch(bt.data());
113 
114  // Train the autoencoder.
115  float loss = neural_net.train (encoded_batch, 0.05);
116 
117  // Get reconstruction.
118  mic::types::MatrixXfPtr encoded_reconstruction = neural_net.getPredictions();
119 
120  std::vector<mic::types::MatrixXfPtr> decoded_reconstruction = mnist_encoder->decodeBatch(encoded_reconstruction);
121  w_reconstruction->setBatchUnsynchronized(decoded_reconstruction);
122 
123  if (iteration%10 == 0) {
124  // Visualize the weights.
125  std::shared_ptr<mic::mlnn::BinaryCorrelator<float> > layer1 = neural_net.getLayer<mic::mlnn::BinaryCorrelator<float> >(0);
126  w_weights1->setBatchUnsynchronized(layer1->getActivations(patch_size, patch_size));
127 
128  }//: if
129 
130  iteration++;
131  LOG(LINFO) << "Iteration: " << iteration << "loss= " << loss;
132  }//: end of critical section
133 
134  }//: if
135 
136  // Sleep.
137  APP_SLEEP();
138  }//: while
139 
140 }//: image_encoder_and_visualization_test
141 
142 
143 
/*!
 * \brief Main program function. Runs two threads: the main thread (GLUT
 * visualization loop) and a batch-processing thread (batch_function).
 * \param argc number of command-line arguments (forwarded to the parameter server and GLUT)
 * \param argv command-line argument values
 * \return -1 when the MNIST dataset cannot be imported; otherwise returns after both threads finish.
 *
 * NOTE(review): initialization order matters here — logger, parameter parsing,
 * importer/encoder construction, property loading, data import, then GLUT and
 * window creation must all precede thread start. Do not reorder.
 */
int main(int argc, char* argv[]) {
	// Set console output to logger.
	LOGGER->addOutput(new ConsoleOutput());
	LOG(LINFO) << "Logger initialized. Starting application";

	// Parse parameters.
	PARAM_SERVER->parseApplicationParameters(argc, argv);

	// Initilize application state ("touch it") ;)
	// Forces construction of the APP_STATE singleton before any thread uses it.
	APP_STATE;

	// Load dataset.
	importer = new mic::importers::MNISTMatrixImporter<float>();
	importer->setBatchSize(batch_size);

	// Initialize the encoders. Encoder geometry = patch_size x patch_size images.
	mnist_encoder = new mic::encoders::MatrixXfMatrixXfEncoder(patch_size, patch_size);
	//label_encoder = new mic::encoders::UIntMatrixXfEncoder(batch_size);

	// Set parameters of all property-tree derived objects - USER independent part.
	PARAM_SERVER->loadPropertiesFromConfiguration();

	// Initialize property-dependent variables of all registered property-tree objects - USER dependent part.
	PARAM_SERVER->initializePropertyDependentVariables();

	// Import data from datasets. Abort the application if the MNIST files are missing.
	if (!importer->importData())
		return -1;

	// Initialize GLUT! :]
	VGL_MANAGER->initializeGLUT(argc, argv);

	// Create batch visualization windows (title, normalization, grid, x, y, w, h).
	w_input = new WindowGrayscaleBatch<float>("Input batch", Grayscale::Norm_HotCold, Grayscale::Grid_Both, 70, 0, 250, 250);
	w_reconstruction = new WindowGrayscaleBatch<float>("Reconstructed batch", Grayscale::Norm_HotCold, Grayscale::Grid_Both, 320, 0, 250, 250);
	w_weights1 = new WindowGrayscaleBatch<float>("Permanences", Grayscale::Norm_HotCold, Grayscale::Grid_Both, 570, 0, 250, 250);
// w_weights2 = new WindowGrayscaleBatch<float>("Connectivity", 1092, 0, 512, 512);

	// Start the data-processing thread.
	boost::thread batch_thread(boost::bind(&batch_function));

	// Start visualization thread.
	// NOTE(review): presumably blocks until the GLUT loop exits — confirm against WindowManager.
	VGL_MANAGER->startVisualizationLoop();

	LOG(LINFO) << "Waiting for threads to join...";
	// End test thread.
	batch_thread.join();
	LOG(LINFO) << "Threads joined - ending application";
}//: main
WindowGrayscaleBatch< float > * w_weights2
mic::encoders::MatrixXfMatrixXfEncoder * mnist_encoder
MNIST matrix encoder.
WindowGrayscaleBatch< float > * w_reconstruction
Updates according to the classical Hebbian rule (w_ij += eta * x_i * y_j, where eta is the learning rate) with additional normalization.
HebbianNeuralNetwork< float > neural_net
Multi-layer neural network.
int main(int argc, char *argv[])
Main program function. Runs two threads: main (for GLUT) and another one (for data processing)...
const size_t patch_size
Label-to-matrix encoder (one-hot).
mic::importers::MNISTMatrixImporter< float > * importer
MNIST importer.
WindowGrayscaleBatch< float > * w_input
Window for displaying the MNIST batch.
Class implementing a linear, fully connected layer.
WindowGrayscaleBatch< float > * w_weights1
Window for displaying the weights.
void batch_function(void)
Function for batch sampling.
Class representing a multi-layer neural network based on hebbian learning.