@INCOLLECTION{RaeSchSmoMikOnoMue00,
  author    = {R{\"a}tsch, G. and Sch{\"o}lkopf, B. and Smola, A.J. and Mika, S. and Onoda, T. and M{\"u}ller, K.-R.},
  editor    = {Smola, A.J. and Bartlett, P.L. and Sch{\"o}lkopf, B. and Schuurmans, D.},
  title     = {Robust Ensemble Learning},
  booktitle = {Advances in Large Margin Classifiers (Proc. of the NIPS*98 Workshop on Large Margin Classifiers)},
  publisher = {MIT Press},
  address   = {Cambridge, MA},
  year      = {2000},
  pages     = {207--219},
  note      = {A similar version appeared in the Journal of the Japanese Society of AI, 2001},
  abstract  = {AdaBoost and other ensemble methods have been applied successfully to a number of classification tasks, seemingly defying problems of overfitting. AdaBoost performs gradient descent in an error function with respect to the margin, asymptotically concentrating on the patterns that are hardest to learn. For noisy problems, however, this can be disadvantageous. Indeed, theoretical analysis has shown that the margin distribution, as opposed to just the minimal margin, plays a crucial role in understanding this phenomenon. Loosely speaking, some outliers should be tolerated if this has the benefit of substantially increasing the margin on the remaining points. We propose new boosting algorithms which, similar to $\nu$-Support-Vector Classification, allow a pre-specified fraction of points to lie within the margin area or even on the wrong side of the decision boundary. Unlike other regularized boosting algorithms, this gives a nicely interpretable way of controlling the trade-off between minimizing the training error and capacity.},
  pdf       = {http://ida.first.fhg.de/~raetsch/ps/RaeSchSmoMikOnoMue00a.pdf},
  postscript = {http://ida.first.fhg.de/~raetsch/ps/RaeSchSmoMikOnoMue00a.ps}
}