@ARTICLE{SonRaeSchSch06,
  author   = {Sonnenburg, S{\"o}ren and R{\"a}tsch, Gunnar and Sch{\"a}fer, Christin and Sch{\"o}lkopf, Bernhard},
  editor   = {Bennett, Kristin P. and Parrado-Hern{\'a}ndez, Emilio},
  title    = {{Large Scale Multiple Kernel Learning}},
  journal  = {Journal of Machine Learning Research},
  year     = {2006},
  volume   = {7},
  pages    = {1531--1565},
  month    = {July},
  abstract = {While classical kernel-based learning algorithms are based on a single kernel, in practice it is often desirable to use multiple kernels. Lanckriet et al. (2004) considered conic combinations of kernel matrices for classification, leading to a convex quadratically constrained quadratic program. We show that it can be rewritten as a semi-infinite linear program that can be efficiently solved by recycling standard SVM implementations. Moreover, we generalize the formulation and our method to a larger class of problems, including regression and one-class classification. Experimental results show that the proposed algorithm works for hundreds of thousands of examples or hundreds of kernels to be combined, and helps with automatic model selection, improving the interpretability of the learning result. In a second part we discuss general speed-up mechanisms for SVMs, especially when used with sparse feature maps as they appear for string kernels, allowing us to train a string kernel SVM on a real-world splice dataset of 10 million examples from computational biology. We integrated Multiple Kernel Learning into our machine learning toolbox \texttt{SHOGUN}, for which the source code is publicly available at http://www.fml.tuebingen.mpg.de/raetsch/projects/shogun.},
  dataset  = {http://www.fml.tuebingen.mpg.de/raetsch/projects/lsmkl/},
  pdf      = {http://www.jmlr.org/papers/volume7/sonnenburg06a/sonnenburg06a.pdf}
}