@inproceedings{4f377e28f0564d4c8b8b042b4f9e5cc9,
  title     = {Online Efficient Learning with Quantized {KLMS} and {$L_1$} Regularization},
  abstract  = {In a recent work, we have proposed the quantized kernel least mean square (QKLMS) algorithm, which is quite effective in online learning sequentially a nonlinear mapping with a slowly growing radial basis function (RBF) structure. In this paper, in order to further reduce the network size, we propose a sparse QKLMS algorithm, which is derived by adding a sparsity inducing $l_1$ norm penalty of the coefficients to the squared error cost. Simulation examples show that the new algorithm works efficiently, and results in a much sparser network while preserving a desirable performance.},
  keywords  = {QKLMS, kernel adaptive filtering, $l_1$ norm penalty, online learning},
  author    = {Chen, Badong and Zhao, Songlin and Seth, Sohan and Principe, Jose C.},
  year      = {2012},
  doi       = {10.1109/IJCNN.2012.6252455},
  language  = {English},
  isbn      = {9781467314909},
  publisher = {IEEE},
  series    = {Proceedings of the International Joint Conference on Neural Networks},
  booktitle = {2012 International Joint Conference on Neural Networks, IJCNN 2012},
  note      = {2012 Annual International Joint Conference on Neural Networks, IJCNN 2012, Part of the 2012 IEEE World Congress on Computational Intelligence, WCCI 2012 ; Conference date: 10-06-2012 Through 15-06-2012},
}