Skip to content

Commit

Permalink
add Linear kernel
Browse files Browse the repository at this point in the history
  • Loading branch information
t-vi committed Feb 26, 2020
1 parent e93fd0c commit ecaed13
Showing 1 changed file with 17 additions and 3 deletions.
20 changes: 17 additions & 3 deletions candlegp/kernels.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def __init__(self, input_dim, active_dims=None, name=None):
If active dims is None, it effectively defaults to range(input_dim),
but we store it as a slice for efficiency.
"""
super(Kern, self).__init__()
super().__init__()
self.name = name
self.input_dim = int(input_dim)
if active_dims is None:
Expand Down Expand Up @@ -189,18 +189,32 @@ def __mul__(self, other):

class Static(Kern):
    """
    Kernels that do not depend on the value of the inputs are 'Static'. The only
    parameter is a variance.
    """

    def __init__(self, input_dim, variance=1.0, active_dims=None, name=None):
        # NOTE: the diff artifact contained both super(Static, self).__init__
        # and super().__init__, which would run the base initializer twice;
        # exactly one call is kept here.
        super().__init__(input_dim, active_dims, name=name)
        # Single positive scalar parameter shared by all static kernels.
        self.variance = parameter.PositiveParam(variance)

    def Kdiag(self, X, presliced=False):
        # Diagonal of K(X, X): the constant variance repeated once per row of X.
        return self.variance.get().expand(X.size(0))


class Linear(Kern):
    """
    The linear kernel: k(x, y) = variance * <x, y>.
    """

    def __init__(self, input_dim, variance=1.0, active_dims=None, name=None):
        super().__init__(input_dim, active_dims, name=name)
        # Positive scale applied to the inner product.
        self.variance = parameter.PositiveParam(variance)

    def K(self, X, X2=None, presliced=False):
        # Symmetric case: compare X against itself when no second input given.
        other = X if X2 is None else X2
        return torch.mm(self.variance.get() * X, other.t())

    def Kdiag(self, X, presliced=False):
        # Diagonal entries: variance * ||x_i||^2 for each row x_i of X.
        return (self.variance.get() * X.pow(2)).sum(1)


class White(Static):
"""
The White kernel
Expand Down

0 comments on commit ecaed13

Please sign in to comment.