@inproceedings{bdd485c856e54872a887a848792cd82a,
title = "Learning and storing the parts of objects: IMF",
abstract = "A central concern for many learning algorithms is how to efficiently store what the algorithm has learned. An algorithm for the compression of Nonnegative Matrix Factorizations is presented. Compression is achieved by embedding the factorization in an encoding routine. Its performance is investigated using two standard test images, Peppers and Barbara. The compression ratio (18:1) achieved by the proposed Matrix Factorization reduces the storage cost of Nonnegative Matrix Factorizations without significantly degrading accuracy (≈ 1-3 dB of degradation is introduced). We learn as before, but storage is cheaper.",
keywords = "compression, matrix factorization",
author = "{De Fr{\'e}in}, Ruair{\'i}",
note = "Publisher Copyright: {\textcopyright} 2014 IEEE. 2014 24th IEEE International Workshop on Machine Learning for Signal Processing, MLSP 2014; Conference date: 21-09-2014 through 24-09-2014",
year = "2014",
month = nov,
day = "14",
doi = "10.1109/MLSP.2014.6958926",
language = "English",
series = "IEEE International Workshop on Machine Learning for Signal Processing, MLSP",
publisher = "IEEE Computer Society",
editor = "Mamadou Mboup and Tulay Adali and Eric Moreau and Jan Larsen",
booktitle = "IEEE International Workshop on Machine Learning for Signal Processing, MLSP",
address = "United States",
}
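
The abstract above outlines the general recipe: factorize the image with Nonnegative Matrix Factorization, then pass the factors through an encoding step so they are cheaper to store. As a rough, non-authoritative illustration of that idea only, a minimal Python sketch follows. It uses plain multiplicative-update NMF and uniform 8-bit quantization of the factors, not the paper's IMF encoding routine; all function names (nmf, quantize, psnr) and parameters (rank, n_bits) are illustrative assumptions.

# Minimal sketch, assuming the "factorize then encode the factors" reading of
# the abstract. This is NOT the paper's IMF algorithm; names and defaults are
# hypothetical.
import numpy as np

def nmf(V, rank, n_iter=200, eps=1e-9):
    # Plain multiplicative-update NMF (Lee-Seung style): V is approximated by W @ H.
    m, n = V.shape
    rng = np.random.default_rng(0)
    W = rng.random((m, rank)) + eps
    H = rng.random((rank, n)) + eps
    for _ in range(n_iter):
        H *= (W.T @ V) / (W.T @ W @ H + eps)
        W *= (V @ H.T) / (W @ H @ H.T + eps)
    return W, H

def quantize(M, n_bits=8):
    # Uniformly quantize a nonnegative factor to n_bits-wide integers.
    scale = M.max() / (2 ** n_bits - 1 + 1e-12)
    return np.round(M / (scale + 1e-12)).astype(np.uint8), scale

def dequantize(Q, scale):
    return Q.astype(np.float64) * scale

def psnr(original, approx, peak=255.0):
    # Reconstruction quality in dB, as used to report the 1-3 dB degradation.
    mse = np.mean((original - approx) ** 2)
    return 10.0 * np.log10(peak ** 2 / mse)

if __name__ == "__main__":
    # Random stand-in for a test image such as Peppers or Barbara.
    V = np.random.default_rng(1).random((256, 256)) * 255.0
    W, H = nmf(V, rank=16)
    (Wq, sw), (Hq, sh) = quantize(W), quantize(H)
    V_hat = dequantize(Wq, sw) @ dequantize(Hq, sh)
    print(f"PSNR of compressed reconstruction: {psnr(V, V_hat):.2f} dB")

Storage savings in this sketch come from keeping only the quantized factors Wq and Hq (m*r + r*n bytes at 8 bits) instead of the full m*n image; how the paper's encoding routine achieves its reported 18:1 ratio is described in the work itself.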