@INPROCEEDINGS{MikSchSmoMueRaeSch99,
  author     = {Mika, S. and {Sch\"olkopf}, B. and Smola, A.J. and {M\"uller}, K.-R. and Scholz, M. and {R\"atsch}, G.},
  editor     = "Kearns, M.S. and Solla, S.A. and Cohn, D.A.",
  title      = "Kernel {PCA} and De-Noising in Feature Spaces",
  booktitle  = "Advances in Neural Inf. Proc. Systems (NIPS 98)",
  year       = "1999",
  pages      = "536--542",
  publisher  = "{MIT} Press",
  abstract   = "Kernel PCA as a nonlinear feature extractor has proven powerful as a preprocessing step for classification algorithms. But it can also be considered a natural generalization of linear principal component analysis. This gives rise to the question of how to use nonlinear features for data compression, reconstruction, and de-noising, applications common in linear PCA. This is a nontrivial task, as the results provided by kernel PCA live in some high-dimensional feature space and need not have pre-images in input space. This work presents ideas for finding approximate pre-images, focusing on Gaussian kernels, and shows experimental results using these pre-images in data reconstruction and de-noising on toy examples as well as on real-world data.",
  pdf        = "http://doc.ml.tu-berlin.de/publications/publications/MikSchSmoMueRaeSch99.pdf",
  postscript = "http://doc.ml.tu-berlin.de/publications/publications/MikSchSmoMueRaeSch99.ps"
}
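
For readers who want to experiment with the method summarized in the abstract, below is a minimal numpy sketch of Gaussian-kernel kernel PCA de-noising via a fixed-point pre-image iteration. It is reconstructed from the abstract's description, not the authors' code; the kernel width, number of components, iteration count, and toy circle data are illustrative assumptions.

# Minimal sketch (not the authors' implementation): kernel PCA de-noising with an
# approximate pre-image for a Gaussian (RBF) kernel, found by fixed-point iteration.
# sigma, n_components, n_iter, and the toy data below are illustrative assumptions.
import numpy as np

def gaussian_kernel(A, B, sigma):
    # k(a, b) = exp(-||a - b||^2 / (2 sigma^2)) for all row pairs of A and B.
    d2 = ((A[:, None, :] - B[None, :, :]) ** 2).sum(-1)
    return np.exp(-d2 / (2.0 * sigma ** 2))

def kpca_denoise(X, x, sigma=0.5, n_components=4, n_iter=200):
    # Project x onto the leading kernel principal components of X and return an
    # approximate pre-image z of that feature-space projection.
    N = len(X)
    K = gaussian_kernel(X, X, sigma)

    # Centre the kernel matrix in feature space.
    one = np.ones((N, N)) / N
    Kc = K - one @ K - K @ one + one @ K @ one

    # Eigendecomposition; keep the top components and rescale so the feature-space
    # eigenvectors V^k = sum_i alpha_i^k Phi~(x_i) have unit norm.
    lam, alpha = np.linalg.eigh(Kc)
    order = np.argsort(lam)[::-1][:n_components]
    lam, alpha = lam[order], alpha[:, order]
    alpha = alpha / np.sqrt(np.maximum(lam, 1e-12))

    # Projections beta_k of the centred test point onto the components.
    kx = gaussian_kernel(x[None, :], X, sigma).ravel()
    kx_c = kx - kx.mean() - K.mean(axis=1) + K.mean()
    beta = alpha.T @ kx_c

    # Expansion coefficients gamma_i of the reconstruction
    # P Phi(x) = sum_i gamma_i Phi(x_i), with the feature-space mean re-added.
    g = alpha @ beta
    gamma = g + (1.0 - g.sum()) / N

    # Fixed-point iteration for the approximate pre-image:
    # z <- sum_i gamma_i k(z, x_i) x_i / sum_i gamma_i k(z, x_i).
    z = x.copy()
    for _ in range(n_iter):
        w = gamma * np.exp(-((z - X) ** 2).sum(axis=1) / (2.0 * sigma ** 2))
        if abs(w.sum()) < 1e-12:
            break  # degenerate denominator; in practice restart from another point
        z = (w[:, None] * X).sum(axis=0) / w.sum()
    return z

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    t = rng.uniform(0.0, 2.0 * np.pi, 200)
    X = np.c_[np.cos(t), np.sin(t)] + 0.05 * rng.standard_normal((200, 2))
    x_noisy = np.array([1.3, 0.2])   # noisy point off the circle
    print("de-noised:", kpca_denoise(X, x_noisy))

The example de-noises a point lying off a noisy circle by projecting it onto the leading kernel principal components and pulling the projection back to input space via the iteration above.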