Skip to content

Commit

Permalink
minor fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
t-vi committed Nov 8, 2017
1 parent 67f8d60 commit 0e8ead8
Show file tree
Hide file tree
Showing 7 changed files with 420 additions and 163 deletions.
4 changes: 2 additions & 2 deletions candlegp/kernels.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,15 +198,15 @@ def __init__(self, input_dim, variance=1.0, active_dims=None, name=None):
super(Static, self).__init__(input_dim, active_dims, name=name)
self.variance = parameter.PositiveParam(variance)

def Kdiag(self, X, presliced=False):
    """Return the diagonal of the kernel matrix evaluated at X.

    For a Static kernel this is the (constant) variance broadcast to one
    entry per input row.

    Parameters
    ----------
    X : tensor of shape (N, input_dim)
        Input points; only its leading dimension is used here.
    presliced : bool, optional
        Accepted for interface compatibility with the other kernels'
        ``Kdiag(X, presliced=...)`` signature; slicing is irrelevant for a
        constant diagonal, so the flag is ignored.

    Returns
    -------
    tensor of shape (N,) filled with the kernel variance.
    """
    # self.variance is a PositiveParam wrapper; .get() yields the
    # underlying (1-element) tensor, which expand() broadcasts to N.
    return self.variance.get().expand(X.size(0))


class White(Static):
"""
The White kernel
"""
#@params_as_tensors

def K(self, X, X2=None, presliced=False):
if X2 is None:
d = self.variance.get().expand(X.size(0))
Expand Down
1 change: 1 addition & 0 deletions candlegp/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@
from .svgp import SVGP
from .vgp import VGP
from .gpmc import GPMC
from .sgpmc import SGPMC
4 changes: 2 additions & 2 deletions candlegp/models/gpmc.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def __init__(self, X, Y, kern, likelihood,
self.num_data = X.size(0)
self.num_latent = num_latent or Y.size(1)
self.V = parameter.Param(self.X.data.new(self.num_data, self.num_latent).zero_())
self.V.prior = priors.Gaussian(0., 1.)
self.V.prior = priors.Gaussian(self.X.data.new(1).fill_(0.), self.X.data.new(1).fill_(1.))

def compute_log_likelihood(self):
"""
def predict_f(self, Xnew, full_cov=False):
    """Predict the latent function values F* at the new points Xnew.

    where F* are points on the GP at Xnew, F=LV are points on the GP at X.

    Parameters
    ----------
    Xnew : tensor of test inputs.
    full_cov : bool, optional
        If True, return the full predictive covariance; otherwise only
        the marginal variances.

    Returns
    -------
    (mean, var) : predictive mean (with the mean function added back)
        and predictive (co)variance at Xnew.
    """
    # Pass the underlying tensor via self.V.get() — not the Param
    # wrapper itself — to the conditional computation.
    mu, var = conditionals.conditional(Xnew, self.X, self.kern, self.V.get(),
                                       full_cov=full_cov,
                                       q_sqrt=None, whiten=True)
    return mu + self.mean_function(Xnew), var
2 changes: 1 addition & 1 deletion candlegp/models/sgpr.py
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ def compute_log_likelihood(self):
# compute log marginal bound
bound = -0.5 * num_data * output_dim * float(numpy.log(2 * numpy.pi))
bound += -output_dim * torch.sum(torch.log(torch.diag(LB)))
bound -= 0.5 * num_data * output_dim * torch.log(self.likelihood.variance.get())
bound = bound - 0.5 * num_data * output_dim * torch.log(self.likelihood.variance.get())
bound += -0.5 * torch.sum(err**2) / self.likelihood.variance.get()
bound += 0.5 * torch.sum(c**2)
bound += -0.5 * output_dim * torch.sum(Kdiag) / self.likelihood.variance.get()
Expand Down
2 changes: 1 addition & 1 deletion candlegp/priors.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
def wrap(x, ttype=torch.Tensor, **argd):
    """Coerce x into a torch ``Variable`` where possible.

    Scalars are promoted to a 1-element tensor of type ``ttype`` and
    wrapped; bare tensors are wrapped directly; anything else is
    returned unchanged.

    Parameters
    ----------
    x : scalar, torch tensor, or arbitrary object.
    ttype : tensor constructor used for scalar promotion
        (default ``torch.Tensor``).
    **argd : extra keyword arguments forwarded to ``Variable``
        (e.g. ``requires_grad``).

    Returns
    -------
    A ``Variable`` when x was a scalar or tensor, otherwise x itself.
    """
    if numpy.isscalar(x):
        x = Variable(ttype([x]), **argd)
    # isinstance requires a type or a *tuple* of types — a list raises
    # TypeError, which is the bug this commit fixed.
    elif isinstance(x, (torch.Tensor, torch.DoubleTensor)):
        x = Variable(x, **argd)
    return x

Expand Down
148 changes: 98 additions & 50 deletions notebooks/gp_regression.ipynb

Large diffs are not rendered by default.

422 changes: 315 additions & 107 deletions notebooks/mcmc.ipynb

Large diffs are not rendered by default.

0 comments on commit 0e8ead8

Please sign in to comment.