#ifndef SRC_MLNN_CONVHEBBIAN_HPP_
#define SRC_MLNN_CONVHEBBIAN_HPP_
namespace experimental {
template <typename eT = float>
p.add("W", nfilters, filter_size * filter_size);
mic::types::MatrixPtr<eT> W = p["W"];
Layer<eT>::template setOptimization<mic::neural_nets::learning::NormalizedZerosumHebbianRule<eT> >();
for (auto i = 0; i < W->rows(); i++) {
    // Make each filter zero-sum by subtracting the row mean...
    W->row(i).array() -= W->row(i).sum() / W->row(i).cols();
    // ...and scale it to unit L2 norm (skipping all-zero rows).
    if (W->row(i).norm() != 0)
        W->row(i) = W->row(i).normalized();
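// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): a minimal, self-contained
// version of the filter initialisation above, assuming plain Eigen and float
// weights. Each filter row is shifted to zero mean ("zero-sum") and then
// scaled to unit L2 norm; all names below are local to the example.

#include <Eigen/Dense>

inline void zerosum_normalize_rows(Eigen::MatrixXf &W) {
    for (Eigen::Index i = 0; i < W.rows(); i++) {
        W.row(i).array() -= W.row(i).mean();   // zero-mean row
        if (W.row(i).norm() != 0.0f)
            W.row(i).normalize();              // unit L2 norm
    }
}

// Usage: Eigen::MatrixXf W = Eigen::MatrixXf::Random(nfilters, filter_size * filter_size);
//        zerosum_normalize_rows(W);
// ----------------------------------------------------------------------------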
mic::types::Matrix<eT> x = (*s["x"]);
mic::types::Matrix<eT> W = (*p["W"]);
mic::types::MatrixPtr<eT> y = s["y"];
for (size_t patch_y = 0; patch_y < filter_size; patch_y++) {
    // Copy one horizontal slice of the (ox, oy) receptive field into its x2col column.
    x2col->block(patch_y * filter_size, ox + (output_width * oy), filter_size, 1) =
        x.block((((oy * stride) + patch_y) * input_width) + (ox * stride), 0, filter_size, 1);
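// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): the indexing above copies
// one horizontal slice of the (ox, oy) receptive field per iteration into a
// single column of x2col. Below is a self-contained version of that mapping,
// assuming plain Eigen, a single input channel, the input stored as a flat
// column vector in raster (row-major) order, and the usual formula for the
// number of receptive fields; all names are local to the example.

#include <Eigen/Dense>

Eigen::MatrixXf im2col(const Eigen::VectorXf &x, int input_width, int input_height,
                       int filter_size, int stride) {
    const int output_width  = (input_width  - filter_size) / stride + 1;
    const int output_height = (input_height - filter_size) / stride + 1;
    Eigen::MatrixXf x2col(filter_size * filter_size, output_width * output_height);
    for (int oy = 0; oy < output_height; oy++)
        for (int ox = 0; ox < output_width; ox++)
            for (int patch_y = 0; patch_y < filter_size; patch_y++)
                // Row patch_y of the patch -> rows [patch_y*filter_size, ...) of its column.
                x2col.block(patch_y * filter_size, ox + output_width * oy, filter_size, 1) =
                    x.segment(((oy * stride + patch_y) * input_width) + ox * stride, filter_size);
    return x2col;
}
// ----------------------------------------------------------------------------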
void update(eT alpha_, eT decay_ = 0.0f) {
    // Delegate the weight update to the learning rule selected in the constructor.
    opt["W"]->update(p["W"], x2col, s["y"], alpha_);
// Note: despite its name, W here points at the output activations s["y"].
mic::types::MatrixPtr<eT> W = s["y"];
for (size_t i = 0; i < nfilters; i++) {
mic::types::MatrixPtr<eT> o = s["y"];
mic::types::MatrixPtr<eT> w = p["W"];
for (size_t ker = 0; ker < nfilters; ker++) {
    // Rectify the filter...
    mic::types::Matrix<eT> k;
    k = (w->row(ker)).transpose();
    k = k.array().max(0.);
    // ...and accumulate it into the reconstruction, weighted by the rectified activation.
    conv2col->col(i) += ((*o)(ker, i) > 0 ? (*o)(ker, i) : 0) * k;
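// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): the loop above rebuilds an
// input patch as a sum of rectified filters weighted by their rectified
// activations. A compact Eigen version for a single patch column i, with all
// names local to the example:

#include <Eigen/Dense>
#include <algorithm>

Eigen::VectorXf reconstruct_patch(const Eigen::MatrixXf &W,   // nfilters x filter_size^2
                                  const Eigen::MatrixXf &y,   // nfilters x npatches
                                  Eigen::Index i) {           // patch (column) index
    Eigen::VectorXf patch = Eigen::VectorXf::Zero(W.cols());
    for (Eigen::Index ker = 0; ker < W.rows(); ker++) {
        const float a = std::max(y(ker, i), 0.0f);              // rectified activation
        patch += a * W.row(ker).transpose().cwiseMax(0.0f);     // rectified filter
    }
    return patch;
}
// ----------------------------------------------------------------------------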
for (size_t ker = 0; ker < nfilters; ker++) {
    mic::types::Matrix<eT> temp;
    temp = conv2col->col(y + (x * output_height));
mic::types::Matrix<eT> diff;
diff = r.normalized() - (*s["x"]).normalized();
eT error = diff.squaredNorm();
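// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): the error above compares
// direction rather than magnitude, since both the reconstruction and the input
// are L2-normalised first; for unit vectors the value equals 2 - 2*cos(theta),
// where theta is the angle between them. A minimal Eigen equivalent:

#include <Eigen/Dense>

float reconstruction_error(const Eigen::VectorXf &r, const Eigen::VectorXf &x) {
    return (r.normalized() - x.normalized()).squaredNorm();
}
// ----------------------------------------------------------------------------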
mic::types::MatrixPtr<eT> W = p["W"];
for (size_t i = 0; i < nfilters; i++) {
row->resize(filter_size, filter_size);
std::vector<std::shared_ptr<mic::types::Matrix<eT> > > & getWeightSimilarity(bool fillDiagonal = false) {
mic::types::MatrixPtr<eT> W = p["W"];
for (size_t i = 0; i < nfilters; i++) {
    for (size_t j = 0; j < i; j++) {
        // Cosine similarity between filters i and j.
        eT sim = W->row(j).dot(W->row(i));
        sim /= W->row(i).norm() * W->row(j).norm();
        (*row)(j + (nfilters * i)) = sim;
        // The matrix is symmetric, so fill the mirrored entry as well.
        (*row)(i + (nfilters * j)) = sim;
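// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): the double loop above fills
// the cosine-similarity matrix entry by entry. With plain Eigen the same matrix
// can be obtained by row-normalising W and forming a Gram matrix (names local
// to the example; zero-norm filters are not treated specially here):

#include <Eigen/Dense>

Eigen::MatrixXf cosine_similarity(const Eigen::MatrixXf &W) {   // nfilters x filter_size^2
    Eigen::MatrixXf Wn = W.rowwise().normalized();              // unit-norm filter rows
    return Wn * Wn.transpose();                                 // S(i, j) = cos(angle between filters i and j)
}
// ----------------------------------------------------------------------------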
for (size_t i = 0; i < nfilters; i++) {
    // Fill the diagonal with alternating +1/-1 values.
    (*row)(i + (nfilters * i)) = 1 - (int)(2 * (i % 2));
row->resize(nfilters, nfilters);
mic::types::MatrixPtr<eT> W = p["W"];
for (size_t i = 0; i < nfilters; i++) {
    for (size_t j = 0; j < nfilters; j++) {
        // Absolute cosine similarity between filters i and j...
        (*row)(j + (nfilters * i)) = std::abs(W->row(j).dot(W->row(i)));
        (*row)(j + (nfilters * i)) /= W->row(i).norm() * W->row(j).norm();
        // ...converted to a dissimilarity: sqrt(1 - cos^2), i.e. |sin| of the angle between them.
        (*row)(j + (nfilters * i)) = std::sqrt(1 - std::pow((*row)(j + (nfilters * i)), 2));
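// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): each entry above equals
// sqrt(1 - cos^2(theta)) = |sin(theta)| for the angle theta between two filters,
// i.e. 0 for parallel or anti-parallel filters and 1 for orthogonal ones (the
// abs() does not change the result, since cos^2 = |cos|^2). Given a cosine-
// similarity matrix S, e.g. from the sketch after getWeightSimilarity():

#include <Eigen/Dense>

Eigen::MatrixXf dissimilarity(const Eigen::MatrixXf &S) {
    // Element-wise |sin| = sqrt(1 - cos^2); clamp at 0 against rounding error.
    return (1.0f - S.array().square()).max(0.0f).sqrt().matrix();
}
// ----------------------------------------------------------------------------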
row->resize(nfilters, nfilters);
std::vector<std::vector<mic::types::Matrix<eT> > > W;
// ConvHebbian: class implementing a convolutional Hebbian layer.
// Member and method summary recovered from the documentation annotations:
//   mic::types::MatrixArray<eT> p    - Parameters of the layer, to be used by the derived classes.
//   mic::types::MatrixArray<eT> s    - States: contains the input [x] and output [y] matrices.
//   mic::neural_nets::optimization::OptimizationArray<eT> opt - Array of optimization functions.
//   size_t input_width               - Width of the input (e.g. 28 for MNIST).
//   size_t input_height              - Height of the input (e.g. 28 for MNIST).
//   size_t input_depth               - Number of channels of the input (e.g. 3 for RGB images).
//   size_t output_width              - Number of receptive fields in a single channel, horizontal direction.
//   size_t output_height             - Number of receptive fields in a single channel, vertical direction.
//   mic::types::MatrixPtr<eT> x2col, conv2col
//   bool o_reconstruction_updated
//   std::vector<std::shared_ptr<mic::types::Matrix<eT> > > o_activations, o_reconstruction,
//       w_activations, w_similarity, w_dissimilarity (w_activations: vector containing activations of neurons).
//   void forward(bool test_ = false)
//   void update(eT alpha_, eT decay_ = 0.0f)
//   eT getOutputReconstructionError()
//   std::vector<std::shared_ptr<mic::types::Matrix<eT> > > & getOutputActivations()
//   std::vector<std::shared_ptr<mic::types::Matrix<eT> > > & getOutputReconstruction()
//   std::vector<std::shared_ptr<mic::types::Matrix<eT> > > & getWeightActivations()
//   std::vector<std::shared_ptr<mic::types::Matrix<eT> > > & getWeightSimilarity(bool fillDiagonal = false)
//       Returns cosine similarity matrix of filters.
//   std::vector<std::shared_ptr<mic::types::Matrix<eT> > > & getWeightDissimilarity()
//   void lazyAllocateMatrixVector(std::vector<std::shared_ptr<mic::types::Matrix<eT> > > &vector_,
//       size_t vector_size_, size_t matrix_height_, size_t matrix_width_)
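// ----------------------------------------------------------------------------
// Editor's sketch (not part of the original file): a minimal end-to-end
// illustration of the idea behind this layer, written against plain Eigen only
// and not using the mic API. Random patches stand in for x2col, the filters
// receive a Hebbian update and are kept zero-sum and unit-norm, and the final
// filter cosine-similarity matrix is printed. All sizes and the learning rate
// are arbitrary choices for the example.

#include <Eigen/Dense>
#include <iostream>

int main() {
    const int nfilters = 4, filter_size = 3, npatches = 64;
    const float alpha = 0.01f;

    Eigen::MatrixXf W = Eigen::MatrixXf::Random(nfilters, filter_size * filter_size);
    for (int step = 0; step < 100; step++) {
        // Random "image patches", one per column (stand-in for x2col).
        Eigen::MatrixXf x2col = Eigen::MatrixXf::Random(filter_size * filter_size, npatches);
        // Activations of the filters on those patches.
        Eigen::MatrixXf y = W * x2col;
        // Hebbian update followed by the zero-sum / unit-norm constraint.
        W += alpha * (y * x2col.transpose());
        for (Eigen::Index i = 0; i < W.rows(); i++) {
            W.row(i).array() -= W.row(i).mean();
            if (W.row(i).norm() != 0.0f)
                W.row(i).normalize();
        }
    }
    // Cosine similarity between the learned filters (cf. getWeightSimilarity()).
    Eigen::MatrixXf Wn = W.rowwise().normalized();
    std::cout << Wn * Wn.transpose() << std::endl;
    return 0;
}
// ----------------------------------------------------------------------------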