diff --git a/RELEASE-NOTES.md b/RELEASE-NOTES.md
index 660ab46820..2b42ff764e 100644
--- a/RELEASE-NOTES.md
+++ b/RELEASE-NOTES.md
@@ -53,6 +53,7 @@ All of the above apply to:
 - The `is_observed` arguement for `gp.Marginal*` implementations has been deprecated.
 - In the gp.utils file, the `kmeans_inducing_points` function now passes through `kmeans_kwargs` to scipy's k-means function.
 - The function `replace_with_values` function has been added to `gp.utils`.
+ - `MarginalSparse` has been renamed `MarginalApprox`.
 - ...
 
 ### Expected breaks
diff --git a/pymc/gp/__init__.py b/pymc/gp/__init__.py
index 8d63ac3be2..9510f33de4 100644
--- a/pymc/gp/__init__.py
+++ b/pymc/gp/__init__.py
@@ -13,4 +13,12 @@
 # limitations under the License.
 
 from pymc.gp import cov, mean, util
-from pymc.gp.gp import TP, Latent, LatentKron, Marginal, MarginalKron, MarginalSparse
+from pymc.gp.gp import (
+    TP,
+    Latent,
+    LatentKron,
+    Marginal,
+    MarginalApprox,
+    MarginalKron,
+    MarginalSparse,
+)
diff --git a/pymc/gp/gp.py b/pymc/gp/gp.py
index b8a7651959..66a6abb34c 100644
--- a/pymc/gp/gp.py
+++ b/pymc/gp/gp.py
@@ -35,7 +35,7 @@
 )
 from pymc.math import cartesian, kron_diag, kron_dot, kron_solve_lower, kron_solve_upper
 
-__all__ = ["Latent", "Marginal", "TP", "MarginalSparse", "LatentKron", "MarginalKron"]
+__all__ = ["Latent", "Marginal", "TP", "MarginalApprox", "LatentKron", "MarginalKron"]
 
 
 class Base:
@@ -597,11 +597,11 @@ def _predict_at(self, Xnew, diag=False, pred_noise=False, given=None, jitter=0.0
 
 
 @conditioned_vars(["X", "Xu", "y", "sigma"])
-class MarginalSparse(Marginal):
+class MarginalApprox(Marginal):
     R"""
     Approximate marginal Gaussian process.
 
-    The `gp.MarginalSparse` class is an implementation of the sum of a GP
+    The `gp.MarginalApprox` class is an implementation of the sum of a GP
     prior and additive noise.  It has `marginal_likelihood`, `conditional`
     and `predict` methods.  This GP implementation can be used to
     implement regression on data that is normally distributed.  The
@@ -619,6 +619,7 @@ class MarginalSparse(Marginal):
         The mean function.  Defaults to zero.
     approx: string
         The approximation to use.  Must be one of `VFE`, `FITC` or `DTC`.
+        Default is VFE.
 
     Examples
     --------
@@ -635,7 +636,7 @@ class MarginalSparse(Marginal):
         cov_func = pm.gp.cov.ExpQuad(1, ls=0.1)
 
         # Specify the GP.  The default mean function is `Zero`.
-        gp = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
+        gp = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")
 
         # Place a GP prior over the function f.
         sigma = pm.HalfCauchy("sigma", beta=3)
@@ -657,11 +658,14 @@ class MarginalSparse(Marginal):
 
     - Titsias, M. (2009). Variational Learning of Inducing Variables
      in Sparse Gaussian Processes.
+
+    - Bauer, M., van der Wilk, M., and Rasmussen, C. E. (2016). Understanding
+      Probabilistic Sparse Gaussian Process Approximations.
     """
 
     _available_approx = ("FITC", "VFE", "DTC")
 
-    def __init__(self, mean_func=Zero(), cov_func=Constant(0.0), approx="FITC"):
+    def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
         if approx not in self._available_approx:
             raise NotImplementedError(approx)
         self.approx = approx
@@ -866,6 +870,16 @@ def conditional(self, name, Xnew, pred_noise=False, given=None, jitter=0.0, **kw
         return pm.MvNormal(name, mu=mu, cov=cov, **kwargs)
 
 
+@conditioned_vars(["X", "Xu", "y", "sigma"])
+class MarginalSparse(MarginalApprox):
+    def __init__(self, approx="VFE", *, mean_func=Zero(), cov_func=Constant(0.0)):
+        warnings.warn(
+            "gp.MarginalSparse has been renamed to gp.MarginalApprox.",
+            FutureWarning,
+        )
+        super().__init__(mean_func=mean_func, cov_func=cov_func, approx=approx)
+
+
 @conditioned_vars(["Xs", "f"])
 class LatentKron(Base):
     R"""
diff --git a/pymc/tests/test_gp.py b/pymc/tests/test_gp.py
index 7ae9b23c86..ddba55eec5 100644
--- a/pymc/tests/test_gp.py
+++ b/pymc/tests/test_gp.py
@@ -840,9 +840,9 @@ def testLatent2(self):
         npt.assert_allclose(latent_logp, self.logp, atol=5)
 
 
-class TestMarginalVsMarginalSparse:
+class TestMarginalVsMarginalApprox:
     R"""
-    Compare logp of models Marginal and MarginalSparse.
+    Compare logp of models Marginal and MarginalApprox.
     Should be nearly equal when inducing points are same as inputs.
     """
 
@@ -871,7 +871,7 @@ def testApproximations(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             p = gp.conditional("p", self.Xnew)
         approx_logp = model.logp({"f": self.y, "p": self.pnew})
@@ -882,7 +882,7 @@ def testPredictVar(self, approx):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx=approx)
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx=approx)
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             mu1, var1 = self.gp.predict(self.Xnew, diag=True)
             mu2, var2 = gp.predict(self.Xnew, diag=True)
@@ -893,7 +893,7 @@ def testPredictCov(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
             mean_func = pm.gp.mean.Constant(0.5)
-            gp = pm.gp.MarginalSparse(mean_func=mean_func, cov_func=cov_func, approx="DTC")
+            gp = pm.gp.MarginalApprox(mean_func=mean_func, cov_func=cov_func, approx="DTC")
             f = gp.marginal_likelihood("f", self.X, self.X, self.y, self.sigma)
             mu1, cov1 = self.gp.predict(self.Xnew, pred_noise=True)
             mu2, cov2 = gp.predict(self.Xnew, pred_noise=True)
@@ -945,17 +945,17 @@ def testAdditiveMarginal(self):
         npt.assert_allclose(logp1, logp2, atol=0, rtol=1e-2)
 
     @pytest.mark.parametrize("approx", ["FITC", "VFE", "DTC"])
-    def testAdditiveMarginalSparse(self, approx):
+    def testAdditiveMarginalApprox(self, approx):
         Xu = np.random.randn(10, 3)
         sigma = 0.1
         with pm.Model() as model1:
-            gp1 = pm.gp.MarginalSparse(
+            gp1 = pm.gp.MarginalApprox(
                 mean_func=self.means[0], cov_func=self.covs[0], approx=approx
             )
-            gp2 = pm.gp.MarginalSparse(
+            gp2 = pm.gp.MarginalApprox(
                 mean_func=self.means[1], cov_func=self.covs[1], approx=approx
             )
-            gp3 = pm.gp.MarginalSparse(
+            gp3 = pm.gp.MarginalApprox(
                 mean_func=self.means[2], cov_func=self.covs[2], approx=approx
             )
 
@@ -964,7 +964,7 @@ def testAdditiveMarginalSparse(self, approx):
         model1_logp = model1.logp({"fsum": self.y})
 
         with pm.Model() as model2:
-            gptot = pm.gp.MarginalSparse(
+            gptot = pm.gp.MarginalApprox(
                 mean_func=reduce(add, self.means), cov_func=reduce(add, self.covs), approx=approx
             )
             fsum = gptot.marginal_likelihood("f", self.X, Xu, self.y, noise=sigma)
@@ -1017,15 +1017,15 @@ def testAdditiveSparseRaises(self):
         # cant add different approximations
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            gp1 = pm.gp.MarginalSparse(cov_func=cov_func, approx="DTC")
-            gp2 = pm.gp.MarginalSparse(cov_func=cov_func, approx="FITC")
+            gp1 = pm.gp.MarginalApprox(cov_func=cov_func, approx="DTC")
+            gp2 = pm.gp.MarginalApprox(cov_func=cov_func, approx="FITC")
             with pytest.raises(Exception) as e_info:
                 gp1 + gp2
 
     def testAdditiveTypeRaises1(self):
         with pm.Model() as model:
             cov_func = pm.gp.cov.ExpQuad(3, [0.1, 0.2, 0.3])
-            gp1 = pm.gp.MarginalSparse(cov_func=cov_func, approx="DTC")
+            gp1 = pm.gp.MarginalApprox(cov_func=cov_func, approx="DTC")
             gp2 = pm.gp.Marginal(cov_func=cov_func)
             with pytest.raises(Exception) as e_info:
                 gp1 + gp2
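
For reviewers, a minimal usage sketch (not part of the patch) of the new entry point and the deprecation shim, assuming a PyMC build that includes this diff; the toy data `X`, `Xu`, `y` and the hyperparameter values are illustrative placeholders:

```python
import warnings

import numpy as np
import pymc as pm

X = np.linspace(0, 1, 50)[:, None]   # toy inputs
Xu = np.linspace(0, 1, 10)[:, None]  # toy inducing points
y = np.random.randn(50)              # toy observations

with pm.Model():
    cov_func = pm.gp.cov.ExpQuad(1, ls=0.1)

    # New name: per the changed signature, `approx` is now the first (and
    # only) positional argument and defaults to "VFE"; mean_func and
    # cov_func are keyword-only.
    gp = pm.gp.MarginalApprox("FITC", cov_func=cov_func)
    f = gp.marginal_likelihood("f", X=X, Xu=Xu, y=y, noise=0.1)

# The old name still constructs a working GP, but instantiating it now
# emits a FutureWarning pointing at the rename.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    pm.gp.MarginalSparse("FITC", cov_func=pm.gp.cov.ExpQuad(1, ls=0.1))
assert any(issubclass(w.category, FutureWarning) for w in caught)
```

Because the shim subclasses `MarginalApprox` and forwards all arguments to it, existing models keep working unchanged; the warning only signals that callers should migrate to the new name.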