@InProceedings{ZieKraSonRae09,
  author    = {Alexander Zien and Nicole Kr{\"a}mer and S{\"o}ren Sonnenburg and Gunnar R{\"a}tsch},
  title     = {The Feature Importance Ranking Measure},
  year      = {2009},
  booktitle = {Proceedings of the European Conference on Machine Learning},
  editor    = {W. Buntine and M. Grobelnik and D. Mladenic and J. Shawe-Taylor},
  volume    = {5782},
  pages     = {694--709},
  publisher = {Springer Berlin / Heidelberg},
  series    = {Lecture Notes in Artificial Intelligence},
  pdf       = {http://arxiv.org/pdf/0906.4258v1},
  abstract  = {Most accurate predictions are typically obtained by learning machines with complex feature spaces (e.g., as induced by kernels). Unfortunately, such decision rules are hardly accessible to humans and cannot easily be used to gain insights about the application domain. Therefore, one often resorts to linear models in combination with variable selection, thereby sacrificing some predictive power for presumptive interpretability. Here, we introduce the Feature Importance Ranking Measure (FIRM), which by retrospective analysis of arbitrary learning machines allows one to achieve both excellent predictive performance and superior interpretation. In contrast to standard raw feature weighting, FIRM takes the underlying correlation structure of the features into account. Thereby, it is able to discover the most relevant features, even if their appearance in the training data is entirely prevented by noise. The desirable properties of FIRM are investigated analytically and illustrated in a few simulations.}
}
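
Note: the abstract describes FIRM as a retrospective, model-agnostic importance measure built on the conditionally expected score of a trained decision function. The sketch below is one plausible empirical reading of that idea, not the authors' reference implementation; the function name firm_scores, the quantile-binning approximation of the conditional expectation, and the parameter n_bins are illustrative assumptions. (BibTeX ignores text outside of entries, so this note does not affect the bibliography.)

  import numpy as np

  def firm_scores(score_fn, X, n_bins=10):
      """Rough FIRM-style importance estimate for each column of X.

      score_fn : callable mapping an (n_samples, n_features) array to
                 real-valued scores of an arbitrary trained model
                 (e.g. an SVM's decision_function).
      X        : samples drawn from the input distribution.
      """
      s = score_fn(X)                       # scores of the learned machine
      n_samples, n_features = X.shape
      importances = np.empty(n_features)
      for f in range(n_features):
          # Approximate q_f(t) = E[s(X) | X_f = t] by averaging scores
          # within quantile bins of feature f; conditioning on the joint
          # sample is what lets correlated features influence the result.
          edges = np.quantile(X[:, f], np.linspace(0.0, 1.0, n_bins + 1))
          idx = np.digitize(X[:, f], edges[1:-1])
          q = np.array([s[idx == b].mean()
                        for b in range(n_bins) if np.any(idx == b)])
          # FIRM-style importance: spread of the conditional expected score.
          importances[f] = q.std()
      return importances

Under these assumptions, a ranking is obtained by sorting features by firm_scores(model.decision_function, X); the binning is a crude stand-in for the paper's analytic treatment of the conditional expectation.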