Commit f7c7793: minibatch mode

t-vi committed Nov 21, 2017
1 parent 71b17f4
Showing 7 changed files with 35 additions and 19 deletions.

3 changes: 2 additions & 1 deletion candlegp/models/gpmc.py

@@ -54,14 +54,15 @@ def __init__(self, X, Y, kern, likelihood,
         self.V = parameter.Param(self.X.data.new(self.num_data, self.num_latent).zero_())
         self.V.prior = priors.Gaussian(self.X.data.new(1).fill_(0.), self.X.data.new(1).fill_(1.))
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """
         Construct a tf function to compute the likelihood of a general GP
         model.
 
             \log p(Y, V | theta).
 
         """
+        assert X is None and Y is None, "{} does not support minibatch mode".format(str(type(self)))
         K = self.kern.K(self.X)
         L = torch.potrf(
             K + Variable(torch.eye(self.X.size(0), out=K.data.new()) * self.jitter_level), upper=False)
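
The exact, full-batch models (GPMC here, and GPR, SGPR, VGP below) accept the widened signature but reject actual minibatches with the assertion above. A minimal sketch of the call-site behaviour, where `m` is a constructed GPMC model and `xb`, `yb` are a hypothetical batch:

    # `m`, `xb`, `yb` are hypothetical; only the full-data call is allowed.
    m.compute_log_likelihood()            # uses the stored self.X, self.Y
    try:
        m.compute_log_likelihood(xb, yb)  # any explicit batch trips the guard
    except AssertionError as err:
        print(err)                        # "... does not support minibatch mode"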

3 changes: 2 additions & 1 deletion candlegp/models/gpr.py

@@ -44,13 +44,14 @@ def __init__(self, X, Y, kern, mean_function=None, **kwargs):
         super(GPR,self).__init__(X, Y, kern, likelihood, mean_function, **kwargs)
         self.num_latent = Y.size(1)
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X = None, Y = None):
         """
         Construct a tensorflow function to compute the likelihood.
 
             \log p(Y | theta).
 
         """
+        assert X is None and Y is None, "{} does not support minibatch mode".format(str(type(self)))
         K = self.kern.K(self.X) + Variable(torch.eye(self.X.size(0),out=self.X.data.new())) * self.likelihood.variance.get()
         L = torch.potrf(K, upper=False)
         m = self.mean_function(self.X)

10 changes: 5 additions & 5 deletions candlegp/models/model.py

@@ -75,19 +75,19 @@ def compute_log_prior(self):
         pass
 
     @abc.abstractmethod
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """Compute the log likelihood of the model."""
         pass
 
-    def objective(self):
-        pos_objective = self.compute_log_likelihood()
+    def objective(self, X=None, Y=None):
+        pos_objective = self.compute_log_likelihood(X, Y)
         for param in self.parameters():
             if isinstance(param, parameter.ParamWithPrior):
                 pos_objective = pos_objective + param.get_prior()
         return -pos_objective
 
-    def forward(self):
-        return self.objective()
+    def forward(self, X=None, Y=None):
+        return self.objective(X, Y)
 
     @abc.abstractmethod
     def predict_f(self, Xnew, full_cov=False):
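
Since `forward` and `objective` now pass the optional `X`, `Y` through to `compute_log_likelihood`, a minibatch-capable model such as the SVGP below can be trained by drawing a fresh batch per step. A minimal sketch, assuming a constructed `model` plus full-data tensors `X_train` and `Y_train` (all three hypothetical), in the Variable-era PyTorch style this code targets:

    import torch

    batch_size = 64                                 # assumed batch size
    num_points = X_train.size(0)
    opt = torch.optim.Adam(model.parameters(), lr=0.01)

    for step in range(1000):
        idx = torch.randperm(num_points)[:batch_size]   # uniform random batch
        xb, yb = X_train[idx], Y_train[idx]
        opt.zero_grad()
        loss = model.objective(xb, yb)              # negative (estimated) bound
        loss.backward()
        opt.step()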

15 changes: 10 additions & 5 deletions candlegp/models/sgpmc.py

@@ -85,20 +85,25 @@ def __init__(self, X, Y, kern, likelihood, Z,
         self.num_latent = num_latent or Y.size(1)
         self.num_inducing = Z.size(0)
         self.Z = parameter.Param(Z)
-        self.V = parameter.Param(self.X.data.new(self.num_inducing, self.num_latent).zero_())
-        self.V.prior = priors.Gaussian(self.X.data.new(1).fill_(0.), self.X.data.new(1).fill_(1.))
+        self.V = parameter.Param(self.Z.data.new(self.num_inducing, self.num_latent).zero_())
+        self.V.prior = priors.Gaussian(self.Z.data.new(1).fill_(0.), self.Z.data.new(1).fill_(1.))
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """
         Construct a tf function to compute the likelihood of a general GP
         model.
 
             \log p(Y, V | theta).
 
         """
+        if X is None:
+            X = self.X
+        if Y is None:
+            Y = self.Y
+
         # get the (marginals of) q(f): exactly predicting!
-        fmean, fvar = self.predict_f(self.X, full_cov=False)
-        return self.likelihood.variational_expectations(fmean, fvar, self.Y).sum()
+        fmean, fvar = self.predict_f(X, full_cov=False)
+        return self.likelihood.variational_expectations(fmean, fvar, Y).sum()
 
     def predict_f(self, Xnew, full_cov=False):
         """

7 changes: 5 additions & 2 deletions candlegp/models/sgpr.py

@@ -117,12 +117,14 @@ def __init__(self, X, Y, kern, Z, mean_function=None, **kwargs):
         self.num_data = X.size(0)
         self.num_latent = Y.size(1)
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """
         For a derivation of the terms in here, see the associated
         SGPR notebook.
         """
 
+        assert X is None and Y is None, "{} does not support minibatch mode".format(str(type(self)))
+
         num_inducing = self.Z.size(0)
         num_data = self.Y.size(0)
         output_dim = self.Y.size(1)
@@ -240,11 +242,12 @@ def _common_terms(self):
 
         return err, nu, Luu, L, alpha, beta, gamma
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """
         Construct a tensorflow function to compute the bound on the marginal
         likelihood.
         """
+        assert X is None and Y is None, "{} does not support minibatch mode".format(str(type(self)))
 
         # FITC approximation to the log marginal likelihood is
         # log ( normal( y | mean, K_fitc ) )

13 changes: 9 additions & 4 deletions candlegp/models/svgp.py

@@ -92,22 +92,27 @@ def prior_KL(self):
         KL = kullback_leiblers.gauss_kl(self.q_mu.get(), self.q_sqrt.get(), K)
         return KL
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """
         This gives a variational bound on the model likelihood.
         """
 
+        if X is None:
+            X = self.X
+        if Y is None:
+            Y = self.Y
+
         # Get prior KL.
         KL = self.prior_KL()
 
         # Get conditionals
-        fmean, fvar = self.predict_f(self.X, full_cov=False)
+        fmean, fvar = self.predict_f(X, full_cov=False)
 
         # Get variational expectations.
-        var_exp = self.likelihood.variational_expectations(fmean, fvar, self.Y)
+        var_exp = self.likelihood.variational_expectations(fmean, fvar, Y)
 
         # re-scale for minibatch size
-        scale = float(self.num_data) / self.X.size(0)
+        scale = float(self.num_data) / X.size(0)
 
         return var_exp.sum() * scale - KL
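
The `float(self.num_data) / X.size(0)` factor is what keeps the minibatch objective an unbiased estimate of the full-data bound: the variational expectations are summed over the batch only, and scaling that sum by N/B matches the full sum in expectation under uniform batch sampling. A small self-contained check of this identity, using stand-in per-point values rather than a real model:

    import torch

    torch.manual_seed(0)
    per_point = torch.randn(1000)           # stand-in for per-point var_exp terms
    full_sum = float(per_point.sum())

    estimates = []
    for _ in range(2000):
        idx = torch.randperm(1000)[:100]    # minibatch of B=100 out of N=1000
        scale = 1000.0 / 100.0              # N / B, as in the code above
        estimates.append(float(per_point[idx].sum()) * scale)

    print(full_sum, sum(estimates) / len(estimates))  # the two agree closely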

3 changes: 2 additions & 1 deletion candlegp/models/vgp.py

@@ -63,7 +63,7 @@ def __init__(self, X, Y, kern, likelihood,
         q_sqrt = torch.eye(self.num_data, out=self.X.data.new()).unsqueeze(2).expand(-1,-1,self.num_latent)
         self.q_sqrt = parameter.LowerTriangularParam(q_sqrt)  # should the diagonal be all positive?
 
-    def compute_log_likelihood(self):
+    def compute_log_likelihood(self, X=None, Y=None):
         """
         This method computes the variational lower bound on the likelihood,
         which is:
@@ -76,6 +76,7 @@ def compute_log_likelihood(self):
         """
 
+        assert X is None and Y is None, "{} does not support minibatch mode".format(str(type(self)))
         # Get prior KL.
         KL = kullback_leiblers.gauss_kl_white(self.q_mu.get(), self.q_sqrt.get())
 
