@article{KloBreSonZie11,
  author   = {Marius Kloft and Ulf Brefeld and S\"oren Sonnenburg and Alexander Zien},
  title    = {Lp-norm Multiple Kernel Learning},
  journal  = {Journal of Machine Learning Research},
  year     = {2011},
  volume   = {12},
  month    = {Mar},
  pages    = {953--997},
  dataset  = {http://doc.ml.tu-berlin.de/nonsparse_mkl/},
  url      = {http://doc.ml.tu-berlin.de/nonsparse_mkl/},
  pdf      = {http://www.jmlr.org/papers/volume12/kloft11a/kloft11a.pdf},
  abstract = {Learning linear combinations of multiple kernels is an appealing strategy when the right choice of features is unknown. Previous approaches to multiple kernel learning (MKL) promote sparse kernel combinations to support interpretability and scalability. Unfortunately, this 1-norm MKL is rarely observed to outperform trivial baselines in practical applications. To allow for robust kernel mixtures, we generalize MKL to arbitrary norms. We devise new insights on the connection between several existing MKL formulations and develop two efficient interleaved optimization strategies for arbitrary norms, such as p-norms with p > 1. Empirically, we demonstrate that the interleaved optimization strategies are much faster than the commonly used wrapper approaches. An experiment on controlled artificial data sheds light on the appropriateness of sparse, non-sparse, and infinity-norm MKL in various scenarios. Applications of p-norm MKL to three hard real-world problems from computational biology show that non-sparse MKL achieves accuracies that go beyond the state-of-the-art. We conclude that our improvements finally made MKL fit for deployment to practical applications: MKL now has a good chance of improving the accuracy (over a plain sum kernel) at an affordable computational cost.},
}