Hossein Moosaei and Milan Hladík. Least squares K-SVCR multi-class classification. In Ilias S. Kotsireas and Panos M. Pardalos, editors, Learning and Intelligent Optimization, LNCS, volume 12096, pp. 117–127, Springer, Cham, 2020.
The support vector classification-regression machine for K-class classification (K-SVCR) is a novel multi-class classification method based on a 1-versus-1-versus-rest structure. In this paper, we propose a least squares version of K-SVCR, named LSK-SVCR. Like the K-SVCR algorithm, this method assesses all the training data within a 1-versus-1-versus-rest structure, so the algorithm generates ternary outputs {−1, 0, +1}. In LSK-SVCR, the solution of the primal problem is computed by solving only one system of linear equations, instead of solving the dual problem, which in K-SVCR is a convex quadratic programming problem. Experimental results on several benchmark data sets show that LSK-SVCR performs better in terms of both predictive accuracy and learning speed.
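The key computational point above is that the dual QP is replaced by a single linear system. The sketch below illustrates that idea for a plain binary least-squares SVM with an RBF kernel; it is not the paper's LSK-SVCR formulation (which uses the ternary 1-versus-1-versus-rest structure), and the parameter names (gamma, sigma) and helper functions are illustrative assumptions.

```python
# Minimal sketch: a binary least-squares SVM trained by solving ONE linear system
# instead of a dual QP. Not the paper's LSK-SVCR; parameters are illustrative.
import numpy as np

def rbf_kernel(A, B, sigma=1.0):
    """Gaussian (RBF) kernel matrix between the rows of A and B."""
    sq = np.sum(A**2, axis=1)[:, None] + np.sum(B**2, axis=1)[None, :] - 2 * A @ B.T
    return np.exp(-sq / (2 * sigma**2))

def lssvm_train(X, y, gamma=10.0, sigma=1.0):
    """Solve the (n+1)x(n+1) linear system of a least-squares SVM (no QP)."""
    n = X.shape[0]
    K = rbf_kernel(X, X, sigma)
    Omega = (y[:, None] * y[None, :]) * K          # Omega_ij = y_i y_j K(x_i, x_j)
    A = np.zeros((n + 1, n + 1))
    A[0, 1:] = y
    A[1:, 0] = y
    A[1:, 1:] = Omega + np.eye(n) / gamma          # regularized kernel block
    rhs = np.concatenate(([0.0], np.ones(n)))
    sol = np.linalg.solve(A, rhs)                  # the single linear solve
    b, alpha = sol[0], sol[1:]
    return alpha, b

def lssvm_predict(X_train, y_train, alpha, b, X_test, sigma=1.0):
    """Decision values f(x) = sum_i alpha_i y_i K(x_i, x) + b; the sign is the label."""
    K = rbf_kernel(X_test, X_train, sigma)
    return np.sign(K @ (alpha * y_train) + b)

if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = np.vstack([rng.normal(-1, 0.5, (30, 2)), rng.normal(1, 0.5, (30, 2))])
    y = np.concatenate([-np.ones(30), np.ones(30)])
    alpha, b = lssvm_train(X, y)
    print("training accuracy:", np.mean(lssvm_predict(X, y, alpha, b, X) == y))
```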
@inCollection{MooHla2020a,
author = "Hossein Moosaei and Milan Hlad\'{\i}k",
title = "Least squares {K-SVCR} multi-class classification",
editor = "Kotsireas, Ilias S. and Pardalos, Panos M.",
booktitle = "Learning and Intelligent Optimization",
fbooktitle = "Learning and Intelligent Optimization, 14th International Conference, LION 14, Athens, Greece, May 24-28, 2020, Revised Selected Papers",
publisher = "Springer",
address = "Cham",
series = "LNCS",
fseries = "Lecture Notes in Computer Science",
volume = "12096",
pages = "117-127",
year = "2020",
doi = "10.1007/978-3-030-53552-0_13",
isbn = "978-3-030-53552-0",
issn = "0302-9743",
url = "https://doi.org/10.1007/978-3-030-53552-0_13",
bib2html_dl_html = "https://link.springer.com/chapter/10.1007/978-3-030-53552-0_13",
bib2html_dl_pdf = "https://rdcu.be/c5uBX",
abstract = "The support vector classification-regression machine for K-class classification (K-SVCR) is a novel multi-class classification method based on a 1-versus-1-versus-rest structure. In this paper, we propose a least squares version of K-SVCR, named LSK-SVCR. Like the K-SVCR algorithm, this method assesses all the training data within a 1-versus-1-versus-rest structure, so the algorithm generates ternary outputs $\{-1, 0, +1\}$. In LSK-SVCR, the solution of the primal problem is computed by solving only one system of linear equations, instead of solving the dual problem, which in K-SVCR is a convex quadratic programming problem. Experimental results on several benchmark data sets show that LSK-SVCR performs better in terms of both predictive accuracy and learning speed.",
keywords = "SVM; K-SVCR; Multi-class classification; Least squares",
}