In this letter, we propose two new support vector approaches for ordinal regression, which optimize multiple thresholds to define parallel discriminant hyperplanes for the ordinal scales. Both approaches guarantee that the thresholds are properly ordered at the optimal solution. The size of these optimization problems is linear in the number of training samples. The sequential minimal optimization algorithm is adapted for the resulting optimization problems; it is extremely easy to implement and scales efficiently as a quadratic function of the number of examples. The results of numerical experiments on some benchmark and real-world data sets, including applications of ordinal regression to information retrieval, verify the usefulness of these approaches.
%0 Journal Article
%1 Chu07supportvector
%A Chu, Wei
%A Keerthi, S. Sathiya
%D 2007
%J Neural Computation
%K ml ordinal-regression svm
%P 792-815
%T Support vector ordinal regression
%U http://www.gatsby.ucl.ac.uk/~chuwei/paper/svor.pdf
%V 19
%X In this letter, we propose two new support vector approaches for ordinal regression, which optimize multiple thresholds to define parallel discriminant hyperplanes for the ordinal scales. Both approaches guarantee that the thresholds are properly ordered at the optimal solution. The size of these optimization problems is linear in the number of training samples. The sequential minimal optimization algorithm is adapted for the resulting optimization problems; it is extremely easy to implement and scales efficiently as a quadratic function of the number of examples. The results of numerical experiments on some benchmark and real-world data sets, including applications of ordinal regression to information retrieval, verify the usefulness of these approaches.
@article{Chu07supportvector,
  abstract    = {In this letter, we propose two new support vector approaches for ordinal regression, which optimize multiple thresholds to define parallel discriminant hyperplanes for the ordinal scales. Both approaches guarantee that the thresholds are properly ordered at the optimal solution. The size of these optimization problems is linear in the number of training samples. The sequential minimal optimization algorithm is adapted for the resulting optimization problems; it is extremely easy to implement and scales efficiently as a quadratic function of the number of examples. The results of numerical experiments on some benchmark and real-world data sets, including applications of ordinal regression to information retrieval, verify the usefulness of these approaches.},
  added-at    = {2012-03-07T17:04:22.000+0100},
  author      = {Chu, Wei and Keerthi, S. Sathiya},
  biburl      = {https://www.bibsonomy.org/bibtex/265ced09efcc296f8c3454679e5d9a7d2/jrquevedogmail},
  description = {CiteSeerX — Support vector ordinal regression},
  interhash   = {ac2e542ec3ac8c759c8d23996afc83a9},
  intrahash   = {65ced09efcc296f8c3454679e5d9a7d2},
  journal     = {Neural Computation},
  keywords    = {ml ordinal-regression svm},
  pages       = {792--815},
  timestamp   = {2014-09-17T11:28:32.000+0200},
  title       = {Support Vector Ordinal Regression},
  url         = {http://www.gatsby.ucl.ac.uk/~chuwei/paper/svor.pdf},
  volume      = {19},
  year        = {2007},
}