Low-Rank Kernel Space Representations in Prototype Learning

Bib
@inproceedings{WSOM2016,
author = {Kerstin Bunte and Marika Kaden and Frank-Michael Schleif},
title = {Low-Rank Kernel Space Representations in Prototype Learning},
booktitle = {Advances in Self-Organizing Maps and Learning Vector Quantization: Proceedings of the 11th International Workshop WSOM},
address = {Houston, Texas, USA},
editor = {Mer\'enyi, Erzs\'ebet and Mendenhall, Michael J. and O'Driscoll, Patrick},
volume = {428},
pages = {341--353},
month = jan,
year = {2016},
publisher = {Springer International Publishing},
isbn = {978-3-319-28518-4},
doi = {10.1007/978-3-319-28518-4_30},
url = {http://dx.doi.org/10.1007/978-3-319-28518-4_30},
abstract = {In supervised learning, feature vectors are often implicitly mapped to a high-dimensional space using the kernel trick, incurring quadratic costs for the learning algorithm. The recently proposed random Fourier features provide an explicit mapping, such that classical algorithms with often linear complexity can be applied. Yet the random Fourier feature approach remains a complex technique that is difficult to interpret. Using Matrix Relevance Learning, a linear mapping of the data for better class separation can be learned by adapting a parametric Euclidean distance. Further, a low-rank representation of the input data can be obtained. We apply this technique to random Fourier feature encoded data to obtain a discriminative mapping of the kernel space. This explicit approach is compared with a differentiable kernel vector quantizer on the same, but implicit, kernel representation. Using multiple benchmark problems, we demonstrate that a parametric distance on an RBF encoding yields better classification results and permits access to interpretable prediction models with visualization abilities.},
}
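
The abstract combines two building blocks: an explicit random Fourier feature (RFF) map that approximates the RBF kernel, and a parametric squared Euclidean distance with a low-rank matrix, as used in matrix relevance learning (GMLVQ-style). The Python sketch below illustrates both under simple assumptions; the function names, shapes, and hyperparameters are illustrative and not the authors' implementation.

# Minimal sketch (not the paper's code) of the two ingredients named in
# the abstract: an RFF encoding of the RBF kernel and an adaptive,
# low-rank parametric distance. Shapes and settings are assumptions.
import numpy as np

def random_fourier_features(X, n_features=500, gamma=1.0, seed=0):
    """Map X (n_samples, d) to an explicit feature space such that
    phi(x) . phi(y) approximates the RBF kernel exp(-gamma * ||x - y||^2)."""
    rng = np.random.default_rng(seed)
    d = X.shape[1]
    # Frequencies sampled from the Fourier transform of the RBF kernel.
    W = rng.normal(scale=np.sqrt(2.0 * gamma), size=(d, n_features))
    b = rng.uniform(0.0, 2.0 * np.pi, size=n_features)
    return np.sqrt(2.0 / n_features) * np.cos(X @ W + b)

def parametric_sq_distance(x, w, Omega):
    """Adaptive squared distance d(x, w) = ||Omega (x - w)||^2.
    A rectangular Omega with rank r < dim gives the low-rank
    discriminative mapping of the kernel space described above."""
    diff = Omega @ (x - w)
    return float(diff @ diff)

# Usage: encode data explicitly, then compare a sample to a prototype
# through a rank-2 mapping Omega (here random; in relevance learning
# Omega would be adapted to improve class separation).
X = np.random.default_rng(1).normal(size=(10, 4))
Phi = random_fourier_features(X, n_features=200)
Omega = np.random.default_rng(2).normal(size=(2, Phi.shape[1]))
d = parametric_sq_distance(Phi[0], Phi[1], Omega)

Because Omega maps the RFF space down to a low-dimensional space (rank 2 above), the learned representation can also be plotted directly, which is the source of the visualization abilities mentioned in the abstract.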