Exploratory Observation Machine (XOM) with Kullback-Leibler Divergence for Dimensionality Reduction and Visualization

Date
Abstract
Links
Bib
@inproceedings{Bunte2010_NEXOM,
  author    = {Bunte, Kerstin and Hammer, Barbara and Villmann, Thomas and Biehl, Michael and Wism{\"u}ller, Axel},
  title     = {Exploratory Observation Machine ({XOM}) with {Kullback-Leibler} Divergence for Dimensionality Reduction and Visualization},
  booktitle = {Proc. of the 18th European Symposium on Artificial Neural Networks ({ESANN})},
  month     = apr,
  pages     = {87--92},
  editor    = {Verleysen, M.},
  publisher = {D-facto Publications},
  address   = {Bruges, Belgium},
  year      = {2010},
  url       = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.216.801},
  abstract  = {We present an extension of the Exploratory Observation Machine (XOM) for structure-preserving dimensionality reduction. Based on minimizing the Kullback-Leibler divergence of neighborhood functions in data and image spaces, this Neighbor Embedding XOM (NE-XOM) creates a link between fast sequential online learning known from topology-preserving mappings and principled direct divergence optimization approaches. We quantitatively evaluate our method on real world data using multiple embedding quality measures. In this comparison, NE-XOM performs as a competitive trade-off between high embedding quality and low computational expense, which motivates its further use in real-world settings throughout science and engineering.},
}