Regularization in Matrix Relevance Learning

Bib
@article{SchneiderIEEE2010,
author = {Petra Schneider and Kerstin Bunte and Barbara Hammer and Michael Biehl},
title = {{Regularization in Matrix Relevance Learning}},
journal = {IEEE Transactions on Neural Networks},
pages = {831--840},
volume = {21},
number = {5},
month = {May},
year = {2010},
doi = {10.1109/TNN.2010.2042729},
url = {http://dx.doi.org/10.1109/TNN.2010.2042729},
abstract = {In this paper, we present a regularization technique to extend recently proposed matrix learning schemes in learning vector quantization (LVQ). These learning algorithms extend the concept of adaptive distance measures in LVQ to the use of relevance matrices. In general, metric learning can display a tendency towards oversimplification in the course of training. An overly pronounced elimination of dimensions in feature space can have negative effects on the performance and may lead to instabilities in the training. We focus on matrix learning in generalized LVQ (GLVQ). Extending the cost function by an appropriate regularization term prevents the unfavorable behavior and can help to improve the generalization ability. The approach is first tested and illustrated in terms of artificial model data. Furthermore, we apply the scheme to benchmark classification data sets from the UCI Repository of Machine Learning. We demonstrate the usefulness of regularization also in the case of rank-limited relevance matrices, i.e., matrix learning with an implicit, low-dimensional representation of the data.},
}
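
The abstract describes the approach only at a high level. As a rough illustration of the idea in Python, the sketch below shows an adaptive relevance-matrix distance and a GLVQ-style cost extended by a determinant-based penalty that discourages an overly pronounced elimination of feature-space dimensions. Function names, the reg weight, and the identity choice of the GLVQ scaling function are assumptions of this sketch, not taken from the paper; the precise form and scaling of the regularization term are defined in the paper itself.

import numpy as np

def relevance_distance(x, w, omega):
    # Adaptive squared distance d_Lambda(w, x) = (x - w)^T Lambda (x - w),
    # with Lambda = Omega^T Omega; Omega may be rectangular (rank-limited).
    diff = omega @ (x - w)
    return float(diff @ diff)

def regularized_glvq_cost(X, y, prototypes, proto_labels, omega, reg=0.1):
    # GLVQ cost over a data set, extended by a penalty on det(Omega Omega^T)
    # so that the relevance matrix does not become (nearly) singular.
    proto_labels = np.asarray(proto_labels)
    cost = 0.0
    for x, label in zip(X, y):
        d = np.array([relevance_distance(x, w, omega) for w in prototypes])
        same = proto_labels == label
        d_correct = d[same].min()    # closest prototype with the same label
        d_wrong = d[~same].min()     # closest prototype with a different label
        mu = (d_correct - d_wrong) / (d_correct + d_wrong)
        cost += mu                   # identity scaling Phi(mu) = mu, for simplicity
    # Regularization: penalize small det(Omega Omega^T), i.e. keep the
    # relevance matrix away from degenerate, oversimplified solutions.
    penalty = -np.log(np.linalg.det(omega @ omega.T))
    return cost + reg * penalty

# Minimal usage illustration with made-up data and a rank-limited 2x4 Omega.
rng = np.random.default_rng(0)
X = rng.normal(size=(20, 4))
y = np.array([0] * 10 + [1] * 10)
prototypes = np.stack([X[y == 0].mean(axis=0), X[y == 1].mean(axis=0)])
proto_labels = np.array([0, 1])
omega = np.eye(2, 4)
print(regularized_glvq_cost(X, y, prototypes, proto_labels, omega))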