@inproceedings{13960,
  title     = {Fast Matrix-Vector Product Based {FGMRES} for Kernel Machines},
  booktitle = {Copper Mountain Conference on Iterative Methods},
  volume    = {2},
  year      = {2010},
  abstract  = {Kernel based approaches for machine learning have gained huge interest in the past decades because of their robustness. In some algorithms, the primary problem is the solution of a linear system involving the kernel matrix. Iterative Krylov approaches are often used to solve these efficiently [2, 3]. Fast matrix-vector products can be used to accelerate each Krylov iteration to further optimize the performance. In order to reduce the number of iterations of the Krylov approach, a preconditioner becomes necessary in many cases. Several researchers have proposed flexible preconditioning methods where the preconditioner changes with each iteration, and this class of preconditioners are shown to have good performance [6, 12]. In this paper, we use a Tikhonov regularized kernel matrix as a preconditioner for flexible GMRES [12] to solve kernel matrix based systems of equations. We use a truncated conjugate gradient (CG) method to solve the preconditioner system and further accelerate each CG iteration using fast matrix-vector products. The convergence of the proposed preconditioned GMRES is shown on synthetic data. The performance is further validated on problems in Gaussian process regression and radial basis function interpolation. Improvements are seen in each case.},
  author    = {Srinivasan, B. V. and Duraiswami, Ramani and Gumerov, Nail A.},
}