@inproceedings{SonRaeSch06,
  author    = {Sonnenburg, S{\"o}ren and R{\"a}tsch, Gunnar and Sch{\"a}fer, Christin},
  title     = {A General and Efficient Multiple Kernel Learning Algorithm},
  booktitle = {Advances in Neural Information Processing Systems 18},
  editor    = {Weiss, Y. and Sch{\"o}lkopf, B. and Platt, J.},
  publisher = {MIT Press},
  address   = {Cambridge, MA},
  pages     = {1273--1280},
  year      = {2006},
  dataset   = {http://www.fml.tuebingen.mpg.de/raetsch/projects/mkl_silp/},
  ps        = {http://books.nips.cc/papers/files/nips18/NIPS2005_0674.ps.gz},
  pdf       = {http://books.nips.cc/papers/files/nips18/NIPS2005_0674.pdf},
  abstract  = {While classical kernel-based learning algorithms are based on a
               single kernel, in practice it is often desirable to use multiple
               kernels. Lankriet et al. (2004) considered conic combinations of
               kernel matrices for classification, leading to a convex
               quadratically constraint quadratic program. We show that it can
               be rewritten as a semi-infinite linear program that can be
               efficiently solved by recycling the standard SVM
               implementations. Moreover, we generalize the formulation and our
               method to a larger class of problems, including regression and
               one-class classification. Experimental results show that the
               proposed algorithm helps for automatic model selection,
               improving the interpretability of the learning result and works
               for hundred thousands of examples or hundreds of kernels to be
               combined.},
}