Merge pull request #5 from JuliaGaussianProcesses/julia-formatter

Add Julia formatter

simsurace authored Mar 6, 2024
2 parents 95244c8 + 2c327fd commit fb8359a

Showing 8 changed files with 111 additions and 78 deletions.
1 change: 1 addition & 0 deletions .JuliaFormatter
@@ -0,0 +1 @@
style = "blue"
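
For reference, the same formatting can be run locally before committing. A minimal sketch, assuming JuliaFormatter is installed (note that JuliaFormatter conventionally reads its options from a .JuliaFormatter.toml file at the repository root):

using JuliaFormatter

# Format the whole repository in place; `verbose=true` prints each file as it
# is processed. Returns `true` if every file was already properly formatted.
format("."; verbose=true)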
31 changes: 31 additions & 0 deletions .github/workflows/Format.yml
@@ -0,0 +1,31 @@
name: Format suggestions

on:
pull_request:

concurrency:
# Skip intermediate builds: always.
# Cancel intermediate builds: only if it is a pull request build.
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }}

jobs:
format:
runs-on: ubuntu-latest
permissions:
contents: read
checks: write
issues: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v1
with:
version: 1
- run: |
julia -e 'using Pkg; Pkg.add("JuliaFormatter")'
julia -e 'using JuliaFormatter; format("."; verbose=true)'
- uses: reviewdog/action-suggester@v1
with:
tool_name: JuliaFormatter
fail_on_error: true
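
The job above only suggests edits through reviewdog/action-suggester; it never pushes commits itself. The two `julia -e` steps can be reproduced locally as a single pre-push check. A sketch, assuming `format` returns `true` when no files needed changes:

using Pkg
Pkg.add("JuliaFormatter")

using JuliaFormatter

# Same invocation as the workflow step; fail loudly if anything was rewritten.
if !format("."; verbose=true)
    @error "Some files were reformatted; review and commit the changes."
    exit(1)
end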
1 change: 1 addition & 0 deletions README.md
@@ -5,6 +5,7 @@
[![Docs: dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://JuliaGaussianProcesses.github.io/EasyGPs.jl/dev)
[![CI](https://github.com/JuliaGaussianProcesses/EasyGPs.jl/actions/workflows/CI.yml/badge.svg)](https://github.com/JuliaGaussianProcesses/EasyGPs.jl/actions/workflows/CI.yml)
[![Codecov](https://codecov.io/gh/JuliaGaussianProcesses/EasyGPs.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/JuliaGaussianProcesses/EasyGPs.jl/tree/master)
[![Code Style: Blue](https://img.shields.io/badge/code%20style-blue-4495d1.svg)](https://github.com/JuliaDiff/BlueStyle)

EasyGPs.jl is a package that defines a high-level API for the JuliaGaussianProcesses
ecosystem. It handles model parameterization and training, allowing users to focus on the
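
For orientation, a minimal usage sketch of that high-level API, consistent with the integration tests further down (the kernel constructors and `EasyGPs.fit` are assumed to be available as used there):

using EasyGPs

# Build a GP prior with a composite kernel, draw noisy observations from it,
# then let EasyGPs handle parameterization and training.
kernel = 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel()
gp = GP(3.0, kernel)
x = 0.01:0.01:1.0
y = rand(gp(x, 0.1))

fitted_gp = EasyGPs.fit(gp, x, y; iterations = 100)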
26 changes: 12 additions & 14 deletions docs/make.jl
@@ -1,6 +1,8 @@
### Process examples
using Pkg
Pkg.add(Pkg.PackageSpec(; url="https://github.com/JuliaGaussianProcesses/JuliaGPsDocs.jl")) # While the package is unregistered, it's a workaround
Pkg.add(
Pkg.PackageSpec(; url = "https://github.com/JuliaGaussianProcesses/JuliaGPsDocs.jl"),
) # While the package is unregistered, it's a workaround

using JuliaGPsDocs

@@ -18,24 +20,20 @@ DocMeta.setdocmeta!(
quote
using EasyGPs
end; # we have to load all packages used (implicitly) within jldoctest blocks in the API docstrings
recursive=true,
recursive = true,
)

makedocs(;
sitename="EasyGPs.jl",
format=Documenter.HTML(;
size_threshold_ignore=[
"examples/0-mauna-loa/index.md",
],
),
modules=[EasyGPs],
pages=[
sitename = "EasyGPs.jl",
format = Documenter.HTML(; size_threshold_ignore = ["examples/0-mauna-loa/index.md"]),
modules = [EasyGPs],
pages = [
"Home" => "index.md",
"Examples" => JuliaGPsDocs.find_generated_examples(EasyGPs),
],
warnonly=true,
checkdocs=:exports,
doctestfilters=JuliaGPsDocs.DOCTEST_FILTERS,
warnonly = true,
checkdocs = :exports,
doctestfilters = JuliaGPsDocs.DOCTEST_FILTERS,
)

deploydocs(; repo="github.com/JuliaGaussianProcesses/EasyGPs.jl.git", push_preview=true)
deploydocs(; repo = "github.com/JuliaGaussianProcesses/EasyGPs.jl.git", push_preview = true)
34 changes: 19 additions & 15 deletions examples/0-mauna-loa/script.jl
@@ -17,7 +17,7 @@ using Plots # visualisation
# Let's load and visualize the dataset.

(xtrain, ytrain), (xtest, ytest) = let
data = CSV.read(joinpath(@__DIR__, "CO2_data.csv"), Tables.matrix; header=0)
data = CSV.read(joinpath(@__DIR__, "CO2_data.csv"), Tables.matrix; header = 0)
year = data[:, 1]
co2 = data[:, 2]

@@ -29,9 +29,9 @@ using Plots # visualisation
end

function plotdata()
plot(; xlabel="year", ylabel="CO₂ [ppm]", legend=:bottomright)
scatter!(xtrain, ytrain; label="training data", ms=2, markerstrokewidth=0)
return scatter!(xtest, ytest; label="test data", ms=2, markerstrokewidth=0)
plot(; xlabel = "year", ylabel = "CO₂ [ppm]", legend = :bottomright)
scatter!(xtrain, ytrain; label = "training data", ms = 2, markerstrokewidth = 0)
return scatter!(xtest, ytest; label = "test data", ms = 2, markerstrokewidth = 0)
end

plotdata()
@@ -42,10 +42,12 @@ plotdata()
# original tutorial.

k_smooth_trend = exp(8.0) * with_lengthscale(SEKernel(), exp(4.0))#with_lengthscale(SEKernel(), exp(4.0))
k_seasonality = exp(2.0) * PeriodicKernel(; r=[0.5]) *
with_lengthscale(SEKernel(), exp(4.0))
k_medium_term_irregularities = 1.0 * with_lengthscale(RationalQuadraticKernel(; α=exp(-1.0)), 1.0)
k_noise_terms = exp(-4.0) * with_lengthscale(SEKernel(), exp(-2.0)) + exp(-4.0) * WhiteKernel()
k_seasonality =
exp(2.0) * PeriodicKernel(; r = [0.5]) * with_lengthscale(SEKernel(), exp(4.0))
k_medium_term_irregularities =
1.0 * with_lengthscale(RationalQuadraticKernel(; α = exp(-1.0)), 1.0)
k_noise_terms =
exp(-4.0) * with_lengthscale(SEKernel(), exp(-2.0)) + exp(-4.0) * WhiteKernel()
kernel = k_smooth_trend + k_seasonality + k_medium_term_irregularities + k_noise_terms
#md nothing #hide

@@ -69,11 +71,11 @@ fpost_init = posterior(gp(xtrain), ytrain)
# By setting `ribbon_scale=2` we visualize the uncertainty band with ``\pm 2``
# (instead of the default ``\pm 1``) standard deviations.

plot_gp!(f; label) = plot!(f(1920:0.2:2030); ribbon_scale=2, linewidth=1, label)
plot_gp!(f; label) = plot!(f(1920:0.2:2030); ribbon_scale = 2, linewidth = 1, label)
#md nothing #hide

plotdata()
plot_gp!(fpost_init; label="posterior f(⋅)")
plot_gp!(fpost_init; label = "posterior f(⋅)")

# A reasonable fit to the data, but poor extrapolation away from the observations!

@@ -84,11 +86,13 @@ plot_gp!(fpost_init; label="posterior f(⋅)")
# We pass an option to choose the exact same optimizer as in the original tutorial.

@time fitted_gp = EasyGPs.fit(
gp, xtrain, ytrain;
gp,
xtrain,
ytrain;
optimizer = Optim.LBFGS(;
alphaguess=Optim.LineSearches.InitialStatic(; scaled=true),
linesearch=Optim.LineSearches.BackTracking(),
)
alphaguess = Optim.LineSearches.InitialStatic(; scaled = true),
linesearch = Optim.LineSearches.BackTracking(),
),
)
#md nothing #hide

@@ -104,4 +108,4 @@ fpost_opt.prior.kernel
# And, finally, we can visualize our optimized posterior GP:

plotdata()
plot_gp!(fpost_opt; label="optimized posterior f(⋅)")
plot_gp!(fpost_opt; label = "optimized posterior f(⋅)")
63 changes: 28 additions & 35 deletions src/EasyGPs.jl
@@ -61,16 +61,11 @@ Takes a callable `model` and returns the optimal parameter, starting with initial
`θ0`. In order to work, there needs to be an implementation of `EasyGPs.costfunction` taking
two arguments, the first of which is of type `typeof(model(θ0))`.
"""
function optimize(
model, θ0, data;
iterations = 1000,
optimizer = Optim.BFGS(),
kwargs...
)
function optimize(model, θ0, data; iterations = 1000, optimizer = Optim.BFGS(), kwargs...)
par0, unflatten = ParameterHandling.flatten(θ0)
optf = Optimization.OptimizationFunction(
(par, data) -> costfunction(model(unflatten(par)), data),
Optimization.AutoZygote()
Optimization.AutoZygote(),
)
prob = Optimization.OptimizationProblem(optf, par0, data)
sol = Optimization.solve(prob, optimizer; maxiters = iterations)
@@ -83,7 +78,7 @@ end
Check whether two things are equal for the purposes of this library. For this to be true,
roughly speaking the objects must be of the same type and have the same parameters.
"""
_isequal(::T1, ::T2) where {T1, T2} = false
_isequal(::T1, ::T2) where {T1,T2} = false



@@ -99,21 +94,20 @@ _isequal(m1::ConstMean, m2::ConstMean) = isapprox(m1.c, m2.c)


# Simple kernels
KernelsWithoutParameters = Union{
SEKernel, Matern32Kernel, Matern52Kernel, WhiteKernel
}
KernelsWithoutParameters = Union{SEKernel,Matern32Kernel,Matern52Kernel,WhiteKernel}

extract_parameters(::T) where T <: KernelsWithoutParameters = nothing
apply_parameters(k::T, θ) where T <: KernelsWithoutParameters = k
_isequal(k1::T, k2::T) where T <: KernelsWithoutParameters = true
extract_parameters(::T) where {T<:KernelsWithoutParameters} = nothing
apply_parameters(k::T, θ) where {T<:KernelsWithoutParameters} = k
_isequal(k1::T, k2::T) where {T<:KernelsWithoutParameters} = true

extract_parameters(k::PeriodicKernel) = ParameterHandling.positive(only(k.r))
apply_parameters(::PeriodicKernel, θ) = PeriodicKernel(r = [θ])
_isequal(k1::T, k2::T) where T <: PeriodicKernel = k1.r k2.r
_isequal(k1::T, k2::T) where {T<:PeriodicKernel} = k1.r k2.r

extract_parameters(k::RationalQuadraticKernel) = ParameterHandling.positive(only(k.α))
apply_parameters(k::RationalQuadraticKernel, θ) = RationalQuadraticKernel(; α = θ, metric = k.metric)
_isequal(k1::T, k2::T) where T <: RationalQuadraticKernel = true
apply_parameters(k::RationalQuadraticKernel, θ) =
RationalQuadraticKernel(; α = θ, metric = k.metric)
_isequal(k1::T, k2::T) where {T<:RationalQuadraticKernel} = true



@@ -124,7 +118,8 @@ _isequal(k1::KernelSum, k2::KernelSum) = mapreduce(_isequal, &, k1.kernels, k2.k

extract_parameters(k::KernelProduct) = map(extract_parameters, k.kernels)
apply_parameters(k::KernelProduct, θ) = KernelProduct(map(apply_parameters, k.kernels, θ))
_isequal(k1::KernelProduct, k2::KernelProduct) = mapreduce(_isequal, &, k1.kernels, k2.kernels)
_isequal(k1::KernelProduct, k2::KernelProduct) =
mapreduce(_isequal, &, k1.kernels, k2.kernels)

function extract_parameters(k::TransformedKernel)
return (extract_parameters(k.kernel), extract_parameters(k.transform))
@@ -133,7 +128,7 @@ end
function apply_parameters(k::TransformedKernel, θ)
return TransformedKernel(
apply_parameters(k.kernel, θ[1]),
apply_parameters(k.transform, θ[2])
apply_parameters(k.transform, θ[2]),
)
end

@@ -146,10 +141,7 @@ function extract_parameters(k::ScaledKernel)
end

function apply_parameters(k::ScaledKernel, θ)
return ScaledKernel(
apply_parameters(k.kernel, θ[1]),
θ[2]
)
return ScaledKernel(apply_parameters(k.kernel, θ[1]), θ[2])
end

function _isequal(k1::ScaledKernel, k2::ScaledKernel)
@@ -168,22 +160,24 @@ _isequal(t1::ScaleTransform, t2::ScaleTransform) = isapprox(t1.s, t2.s)
# Likelihoods
extract_parameters(::BernoulliLikelihood) = nothing
apply_parameters(l::BernoulliLikelihood, θ) = l
_isequal(l1::T, l2::T) where T <: BernoulliLikelihood = true
_isequal(l1::T, l2::T) where {T<:BernoulliLikelihood} = true

extract_parameters(::PoissonLikelihood) = nothing
apply_parameters(l::PoissonLikelihood, θ) = l
_isequal(l1::T, l2::T) where T <: PoissonLikelihood = true
_isequal(l1::T, l2::T) where {T<:PoissonLikelihood} = true



# GPs
extract_parameters(f::GP) = (extract_parameters(f.mean), extract_parameters(f.kernel))
apply_parameters(f::GP, θ) = GP(apply_parameters(f.mean, θ[1]), apply_parameters(f.kernel, θ[2]))
apply_parameters(f::GP, θ) =
GP(apply_parameters(f.mean, θ[1]), apply_parameters(f.kernel, θ[2]))
costfunction(f::GP, data) = -logpdf(f(data.x), data.y)
_isequal(f1::GP, f2::GP) = _isequal(f1.mean, f2.mean) && _isequal(f1.kernel, f2.kernel)

extract_parameters(f::LatentGP) = (extract_parameters(f.f), extract_parameters(f.lik))
apply_parameters(f::LatentGP, θ) = LatentGP(apply_parameters(f.f, θ[1]), apply_parameters(f.lik, θ[2]), f.Σy)
apply_parameters(f::LatentGP, θ) =
LatentGP(apply_parameters(f.f, θ[1]), apply_parameters(f.lik, θ[2]), f.Σy)



Expand Down Expand Up @@ -214,7 +208,7 @@ _isequal(d1::MvNormal, d2::MvNormal) = isapprox(d1.μ, d1.μ) && isapprox(d1.Σ,


# Custom wrappers
struct NoisyGP{T <: GP, Tn <: Real}
struct NoisyGP{T<:GP,Tn<:Real}
gp::T
obs_noise::Tn
end
@@ -223,12 +217,14 @@ end

with_gaussian_noise(gp::GP, obs_noise::Real) = NoisyGP(gp, obs_noise)

extract_parameters(f::NoisyGP) = (extract_parameters(f.gp), ParameterHandling.positive(f.obs_noise, exp, 1e-6))
extract_parameters(f::NoisyGP) =
(extract_parameters(f.gp), ParameterHandling.positive(f.obs_noise, exp, 1e-6))
apply_parameters(f::NoisyGP, θ) = NoisyGP(apply_parameters(f.gp, θ[1]), θ[2])
costfunction(f::NoisyGP, data) = -logpdf(f(data.x), data.y)
_isequal(f1::NoisyGP, f2::NoisyGP) = _isequal(f1.gp, f2.gp) && isapprox(f1.obs_noise, f2.obs_noise)
_isequal(f1::NoisyGP, f2::NoisyGP) =
_isequal(f1.gp, f2.gp) && isapprox(f1.obs_noise, f2.obs_noise)

struct SVGP{T <: LatentGP, Ts <: SVA}
struct SVGP{T<:LatentGP,Ts<:SVA}
lgp::T
sva::Ts
fixed_inducing_points::Bool
@@ -237,10 +233,7 @@ end
SVGP(lgp, sva; fixed_inducing_points) = SVGP(lgp, sva, fixed_inducing_points)

function extract_parameters(f::SVGP)
return (
extract_parameters(f.lgp),
extract_parameters(f.sva, f.fixed_inducing_points),
)
return (extract_parameters(f.lgp), extract_parameters(f.sva, f.fixed_inducing_points))
end

function apply_parameters(f::SVGP, θ)
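
Taken together, `extract_parameters`/`apply_parameters` and the `optimize` entry point implement a parameterize-and-rebuild pattern. A minimal round-trip sketch, mirroring what the unit tests below exercise (`parameterize` and `_isequal` are internal, hence the qualified names):

using EasyGPs

k = 2.0 * with_lengthscale(SEKernel(), 1.0)

# `parameterize` splits an object into a rebuilding closure and a
# ParameterHandling-compatible parameter structure.
model, θ = EasyGPs.parameterize(k)

k2 = model(θ)            # rebuilds a structurally equivalent kernel
EasyGPs._isequal(k, k2)  # true: same types and same parameter values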
15 changes: 8 additions & 7 deletions test/integration_tests.jl
@@ -1,25 +1,26 @@
@testitem "GP without noise" begin
kernel = 2. * with_lengthscale(SEKernel(), 1.) + 3. * Matern32Kernel() * Matern52Kernel()
gp = GP(3., kernel)
x = 0.01:0.01:1.
kernel =
2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel()
gp = GP(3.0, kernel)
x = 0.01:0.01:1.0
y = rand(gp(x, 0.1))
fitted_gp = EasyGPs.fit(gp, x, y; iterations = 1)
@test fitted_gp isa typeof(gp)
@test !EasyGPs._isequal(fitted_gp, gp)
end

@testitem "GP with Gaussian noise" begin
kernel = 2. * with_lengthscale(SEKernel(), 1.) + 3. * WhiteKernel()
gp = with_gaussian_noise(GP(3., kernel), 0.1)
x = 0.01:0.01:1.
kernel = 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * WhiteKernel()
gp = with_gaussian_noise(GP(3.0, kernel), 0.1)
x = 0.01:0.01:1.0
y = rand(gp.gp(x, 0.1))
fitted_gp = EasyGPs.fit(gp, x, y; iterations = 1)
@test fitted_gp isa typeof(gp)
@test !EasyGPs._isequal(fitted_gp, gp)
end

@testitem "Sparse variational 2d GP with Poisson likelihood" begin
kernel = 1. * SEKernel()
kernel = 1.0 * SEKernel()
lgp = LatentGP(GP(0.0, kernel), PoissonLikelihood(), 1e-6)
x = rand(100, 2) |> RowVecs
y = round.(Int, 10 .* sum.(abs2, x))
18 changes: 11 additions & 7 deletions test/unit_tests.jl
@@ -13,14 +13,18 @@ end
@testitem "parameterize" begin
import ParameterHandling
for object in (
ZeroMean(), ConstMean(1.),
SEKernel(), Matern32Kernel(), Matern52Kernel(),
with_lengthscale(SEKernel(), 2.),
2. * SEKernel(), 3. * SEKernel() + 2. * Matern32Kernel(),
2. * Matern32Kernel() * SEKernel(),
2. * with_lengthscale(SEKernel(), 1.) + 3. * Matern32Kernel() * Matern52Kernel(),
ZeroMean(),
ConstMean(1.0),
SEKernel(),
Matern32Kernel(),
Matern52Kernel(),
with_lengthscale(SEKernel(), 2.0),
2.0 * SEKernel(),
3.0 * SEKernel() + 2.0 * Matern32Kernel(),
2.0 * Matern32Kernel() * SEKernel(),
2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel(),
BernoulliLikelihood(),
PoissonLikelihood()
PoissonLikelihood(),
)
model, θ = EasyGPs.parameterize(object)
new_object = @inferred model(θ)
