@article{13999,
  title    = {Fast Computation of Sums of {Gaussians}},
  journal  = {Journal of Machine Learning Research},
  note     = {Submitted},
  volume   = {4},
  year     = {2007},
  abstract = {Evaluating sums of multivariate Gaussian kernels is a key computational task in many
problems in computational statistics and machine learning. The computational cost of
the direct evaluation of such sums scales as the product of the number of kernel functions
and the evaluation points. The fast Gauss transform proposed by Greengard and Strain
(1991) is an $\epsilon$-exact approximation algorithm that reduces the computational complexity
of the evaluation of the sum of N Gaussians at M points in d dimensions from O(MN)
to O(M + N). However, the constant factor in O(M + N) grows exponentially with
increasing dimensionality d, which makes the algorithm impractical for dimensions greater
than three. In this paper we present a new algorithm where the constant factor is reduced
to asymptotically polynomial order. The reduction is based on a new multivariate Taylor's
series expansion (which can act both as a local as well as a far field expansion) scheme
combined with the efficient space subdivision using the k-center algorithm. The proposed
method differs from the original fast Gauss transform in terms of a different factorization,
efficient space subdivision, and the use of different error bounds for each source point.
Algorithm details, error bounds, a procedure to choose the parameters, and numerical
experiments are presented. We also compare our algorithm with the dual-tree algorithm
of Gray and Moore (2003). As an example we show how the proposed method can be used
for very fast $\epsilon$-exact multivariate kernel density estimation.},
  author   = {Raykar, Vikas C. and Duraiswami, Ramani and Gumerov, Nail A.}
}