@inproceedings{09d2e367ab7a4d8c820708b580cbbf23,
title = "Variational inference for infinite mixtures of sparse Gaussian processes through KL-correction",
abstract = "We propose a new approximation method for Gaussian process (GP) regression based on the mixture-of-experts structure and variational inference. Our model is essentially an infinite mixture model in which each component is composed of a Gaussian distribution over the input space and a Gaussian process expert over the output space. Each expert is a sparse GP model augmented with its own set of inducing points. Variational inference is made feasible by assuming that the training outputs are independent given the inducing points. In previous work on variational mixtures of GP experts, the inducing points are selected through a greedy selection algorithm, which is computationally expensive. In our method, both the inducing points and the hyperparameters of the experts are learned by maximizing an improved lower bound on the marginal likelihood. Experiments on benchmark datasets show the advantages of the proposed method.",
keywords = "Gaussian process, variational inference",
author = "Nguyen, {T. N. A.} and Bouzerdoum, A. and Phung, {S. L.}",
note = "Publisher Copyright: {\textcopyright} 2016 IEEE. 41st IEEE International Conference on Acoustics, Speech and Signal Processing, ICASSP 2016; Conference date: 20-03-2016 through 25-03-2016",
year = "2016",
month = may,
day = "18",
doi = "10.1109/ICASSP.2016.7472143",
language = "English",
series = "ICASSP, IEEE International Conference on Acoustics, Speech and Signal Processing - Proceedings",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "2579--2583",
booktitle = "2016 IEEE International Conference on Acoustics, Speech and Signal Processing, ICASSP 2016 - Proceedings",
address = "United States",
}