@article{SugKawMue04a,
  author     = {Sugiyama, Masashi and Kawanabe, Motoaki and M{\"u}ller, Klaus-Robert},
  title      = {Trading Variance Reduction with Unbiasedness: {The} Regularized Subspace Information Criterion for Robust Model Selection in Kernel Regression},
  journal    = {Neural Computation},
  volume     = {16},
  number     = {5},
  pages      = {1077--1104},
  year       = {2004},
  pdf        = {http://doc.ml.tu-berlin.de/publications/publications/SugKawMue04a.pdf},
  postscript = {http://doc.ml.tu-berlin.de/publications/publications/SugKawMue04a.ps.gz},
  abstract   = {A well-known result by Stein (1956) shows that in particular situations, biased estimators can yield better parameter estimates than their generally preferred unbiased counterparts. This paper follows the same spirit as we will stabilize the unbiased generalization error estimates by regularization and finally obtain more robust model selection criteria for learning. We trade a small bias against a larger variance reduction which has the beneficial effect of being more precise on a single training set. We focus on the subspace information criterion (SIC), which is an unbiased estimator of the expected generalization error measured by the reproducing kernel Hilbert space norm. SIC can be applied to the kernel regression and it was shown in earlier experiments that a small regularization of SIC has a stabilization effect. However, it remained open how to appropriately determine the degree of regularization in SIC. In this paper, we derive an unbiased estimator of the expected squared error between SIC and the expected generalization error, and propose determining the degree of regularization of SIC such that the estimator of the expected squared error is minimized. Computer simulations with artificial and real data sets illustrate that the proposed method works effectively for improving the precision of SIC, especially in the high noise level cases. We furthermore compare to the original SIC, the cross-validation, and an empirical Bayesian method in ridge parameter selection, with good results.},
}