From 643328d9ce0928f85a0f451cc039b6baad0d3fa2 Mon Sep 17 00:00:00 2001 From: Simone Carlo Surace Date: Tue, 5 Mar 2024 13:39:19 +0100 Subject: [PATCH 1/5] Add formatter --- .JuliaFormatter | 1 + .github/workflows/Format.yml | 31 +++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 .JuliaFormatter create mode 100644 .github/workflows/Format.yml diff --git a/.JuliaFormatter b/.JuliaFormatter new file mode 100644 index 0000000..c743950 --- /dev/null +++ b/.JuliaFormatter @@ -0,0 +1 @@ +style = "blue" \ No newline at end of file diff --git a/.github/workflows/Format.yml b/.github/workflows/Format.yml new file mode 100644 index 0000000..ea33c9f --- /dev/null +++ b/.github/workflows/Format.yml @@ -0,0 +1,31 @@ +name: Format suggestions + +on: + pull_request: + +concurrency: + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + +jobs: + format: + runs-on: ubuntu-latest + permissions: + contents: read + checks: write + issues: write + pull-requests: write + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v1 + with: + version: 1 + - run: | + julia -e 'using Pkg; Pkg.add("JuliaFormatter")' + julia -e 'using JuliaFormatter; format("."; verbose=true)' + - uses: reviewdog/action-suggester@v1 + with: + tool_name: JuliaFormatter + fail_on_error: true From c33484f86cfca8b67bb850766b59ca4a4aba28ab Mon Sep 17 00:00:00 2001 From: Simone Carlo Surace Date: Wed, 6 Mar 2024 16:26:23 +0100 Subject: [PATCH 2/5] Run formatter --- examples/0-mauna-loa/script.jl | 4 ++-- src/EasyGPs.jl | 34 +++++++++++++++++----------------- test/integration_tests.jl | 22 +++++++++++----------- test/unit_tests.jl | 10 +++++----- 4 files changed, 35 insertions(+), 35 deletions(-) diff --git a/examples/0-mauna-loa/script.jl b/examples/0-mauna-loa/script.jl index bb99515..f07d7b3 100644 --- a/examples/0-mauna-loa/script.jl +++ b/examples/0-mauna-loa/script.jl @@ -43,7 +43,7 @@ plotdata() k_smooth_trend = exp(8.0) * with_lengthscale(SEKernel(), exp(4.0))#with_lengthscale(SEKernel(), exp(4.0)) k_seasonality = exp(2.0) * PeriodicKernel(; r=[0.5]) * - with_lengthscale(SEKernel(), exp(4.0)) + with_lengthscale(SEKernel(), exp(4.0)) k_medium_term_irregularities = 1.0 * with_lengthscale(RationalQuadraticKernel(; α=exp(-1.0)), 1.0) k_noise_terms = exp(-4.0) * with_lengthscale(SEKernel(), exp(-2.0)) + exp(-4.0) * WhiteKernel() kernel = k_smooth_trend + k_seasonality + k_medium_term_irregularities + k_noise_terms @@ -85,7 +85,7 @@ plot_gp!(fpost_init; label="posterior f(⋅)") @time fitted_gp = EasyGPs.fit( gp, xtrain, ytrain; - optimizer = Optim.LBFGS(; + optimizer=Optim.LBFGS(; alphaguess=Optim.LineSearches.InitialStatic(; scaled=true), linesearch=Optim.LineSearches.BackTracking(), ) diff --git a/src/EasyGPs.jl b/src/EasyGPs.jl index 887d1fd..8c4c105 100644 --- a/src/EasyGPs.jl +++ b/src/EasyGPs.jl @@ -63,8 +63,8 @@ two arguments, the first of which is of type `typeof(model(θ0))`. """ function optimize( model, θ0, data; - iterations = 1000, - optimizer = Optim.BFGS(), + iterations=1000, + optimizer=Optim.BFGS(), kwargs... 
) par0, unflatten = ParameterHandling.flatten(θ0) @@ -73,7 +73,7 @@ function optimize( Optimization.AutoZygote() ) prob = Optimization.OptimizationProblem(optf, par0, data) - sol = Optimization.solve(prob, optimizer; maxiters = iterations) + sol = Optimization.solve(prob, optimizer; maxiters=iterations) return unflatten(sol.u) end @@ -83,7 +83,7 @@ end Check whether two things are equal for the purposes of this library. For this to be true, roughly speaking the objects must be of the same type and have the same parameters. """ -_isequal(::T1, ::T2) where {T1, T2} = false +_isequal(::T1, ::T2) where {T1,T2} = false @@ -100,20 +100,20 @@ _isequal(m1::ConstMean, m2::ConstMean) = isapprox(m1.c, m2.c) # Simple kernels KernelsWithoutParameters = Union{ - SEKernel, Matern32Kernel, Matern52Kernel, WhiteKernel + SEKernel,Matern32Kernel,Matern52Kernel,WhiteKernel } -extract_parameters(::T) where T <: KernelsWithoutParameters = nothing -apply_parameters(k::T, θ) where T <: KernelsWithoutParameters = k -_isequal(k1::T, k2::T) where T <: KernelsWithoutParameters = true +extract_parameters(::T) where {T<:KernelsWithoutParameters} = nothing +apply_parameters(k::T, θ) where {T<:KernelsWithoutParameters} = k +_isequal(k1::T, k2::T) where {T<:KernelsWithoutParameters} = true extract_parameters(k::PeriodicKernel) = ParameterHandling.positive(only(k.r)) -apply_parameters(::PeriodicKernel, θ) = PeriodicKernel(r = [θ]) -_isequal(k1::T, k2::T) where T <: PeriodicKernel = k1.r ≈ k2.r +apply_parameters(::PeriodicKernel, θ) = PeriodicKernel(r=[θ]) +_isequal(k1::T, k2::T) where {T<:PeriodicKernel} = k1.r ≈ k2.r extract_parameters(k::RationalQuadraticKernel) = ParameterHandling.positive(only(k.α)) -apply_parameters(k::RationalQuadraticKernel, θ) = RationalQuadraticKernel(; α = θ, metric = k.metric) -_isequal(k1::T, k2::T) where T <: RationalQuadraticKernel = true +apply_parameters(k::RationalQuadraticKernel, θ) = RationalQuadraticKernel(; α=θ, metric=k.metric) +_isequal(k1::T, k2::T) where {T<:RationalQuadraticKernel} = true @@ -168,11 +168,11 @@ _isequal(t1::ScaleTransform, t2::ScaleTransform) = isapprox(t1.s, t2.s) # Likelihoods extract_parameters(::BernoulliLikelihood) = nothing apply_parameters(l::BernoulliLikelihood, θ) = l -_isequal(l1::T, l2::T) where T <: BernoulliLikelihood = true +_isequal(l1::T, l2::T) where {T<:BernoulliLikelihood} = true extract_parameters(::PoissonLikelihood) = nothing apply_parameters(l::PoissonLikelihood, θ) = l -_isequal(l1::T, l2::T) where T <: PoissonLikelihood = true +_isequal(l1::T, l2::T) where {T<:PoissonLikelihood} = true @@ -202,7 +202,7 @@ function apply_parameters(sva::SVA, θ) return SVA(fz, q) end -variational_gaussian(n::Int, T = Float64) = MvNormal(zeros(T, n), Matrix{T}(I, n, n)) +variational_gaussian(n::Int, T=Float64) = MvNormal(zeros(T, n), Matrix{T}(I, n, n)) @@ -214,7 +214,7 @@ _isequal(d1::MvNormal, d2::MvNormal) = isapprox(d1.μ, d1.μ) && isapprox(d1.Σ, # Custom wrappers -struct NoisyGP{T <: GP, Tn <: Real} +struct NoisyGP{T<:GP,Tn<:Real} gp::T obs_noise::Tn end @@ -228,7 +228,7 @@ apply_parameters(f::NoisyGP, θ) = NoisyGP(apply_parameters(f.gp, θ[1]), θ[2]) costfunction(f::NoisyGP, data) = -logpdf(f(data.x), data.y) _isequal(f1::NoisyGP, f2::NoisyGP) = _isequal(f1.gp, f2.gp) && isapprox(f1.obs_noise, f2.obs_noise) -struct SVGP{T <: LatentGP, Ts <: SVA} +struct SVGP{T<:LatentGP,Ts<:SVA} lgp::T sva::Ts fixed_inducing_points::Bool diff --git a/test/integration_tests.jl b/test/integration_tests.jl index 9137263..c348f62 100644 --- a/test/integration_tests.jl +++ 
b/test/integration_tests.jl @@ -1,32 +1,32 @@ @testitem "GP without noise" begin - kernel = 2. * with_lengthscale(SEKernel(), 1.) + 3. * Matern32Kernel() * Matern52Kernel() - gp = GP(3., kernel) - x = 0.01:0.01:1. + kernel = 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel() + gp = GP(3.0, kernel) + x = 0.01:0.01:1.0 y = rand(gp(x, 0.1)) - fitted_gp = EasyGPs.fit(gp, x, y; iterations = 1) + fitted_gp = EasyGPs.fit(gp, x, y; iterations=1) @test fitted_gp isa typeof(gp) @test !EasyGPs._isequal(fitted_gp, gp) end @testitem "GP with Gaussian noise" begin - kernel = 2. * with_lengthscale(SEKernel(), 1.) + 3. * WhiteKernel() - gp = with_gaussian_noise(GP(3., kernel), 0.1) - x = 0.01:0.01:1. + kernel = 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * WhiteKernel() + gp = with_gaussian_noise(GP(3.0, kernel), 0.1) + x = 0.01:0.01:1.0 y = rand(gp.gp(x, 0.1)) - fitted_gp = EasyGPs.fit(gp, x, y; iterations = 1) + fitted_gp = EasyGPs.fit(gp, x, y; iterations=1) @test fitted_gp isa typeof(gp) @test !EasyGPs._isequal(fitted_gp, gp) end @testitem "Sparse variational 2d GP with Poisson likelihood" begin - kernel = 1. * SEKernel() + kernel = 1.0 * SEKernel() lgp = LatentGP(GP(0.0, kernel), PoissonLikelihood(), 1e-6) x = rand(100, 2) |> RowVecs y = round.(Int, 10 .* sum.(abs2, x)) z = x[begin:5:end] sva = SVA(lgp(z).fx, variational_gaussian(length(z))) - svgp = SVGP(lgp, sva; fixed_inducing_points = true) - fitted_svgp = EasyGPs.fit(svgp, x, y; iterations = 1) + svgp = SVGP(lgp, sva; fixed_inducing_points=true) + fitted_svgp = EasyGPs.fit(svgp, x, y; iterations=1) @test fitted_svgp isa typeof(svgp) @test !EasyGPs._isequal(fitted_svgp, svgp) @test fitted_svgp.sva.fz.x === z diff --git a/test/unit_tests.jl b/test/unit_tests.jl index 04ef612..fcd0798 100644 --- a/test/unit_tests.jl +++ b/test/unit_tests.jl @@ -13,12 +13,12 @@ end @testitem "parameterize" begin import ParameterHandling for object in ( - ZeroMean(), ConstMean(1.), + ZeroMean(), ConstMean(1.0), SEKernel(), Matern32Kernel(), Matern52Kernel(), - with_lengthscale(SEKernel(), 2.), - 2. * SEKernel(), 3. * SEKernel() + 2. * Matern32Kernel(), - 2. * Matern32Kernel() * SEKernel(), - 2. * with_lengthscale(SEKernel(), 1.) + 3. * Matern32Kernel() * Matern52Kernel(), + with_lengthscale(SEKernel(), 2.0), + 2.0 * SEKernel(), 3.0 * SEKernel() + 2.0 * Matern32Kernel(), + 2.0 * Matern32Kernel() * SEKernel(), + 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel(), BernoulliLikelihood(), PoissonLikelihood() ) From 1f8f15fcc2acedfe3cff82905622128c0f3586a2 Mon Sep 17 00:00:00 2001 From: Simone Carlo Surace Date: Wed, 6 Mar 2024 16:26:49 +0100 Subject: [PATCH 3/5] Violate code style --- src/EasyGPs.jl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/EasyGPs.jl b/src/EasyGPs.jl index 8c4c105..15b67ea 100644 --- a/src/EasyGPs.jl +++ b/src/EasyGPs.jl @@ -68,10 +68,7 @@ function optimize( kwargs... 
) par0, unflatten = ParameterHandling.flatten(θ0) - optf = Optimization.OptimizationFunction( - (par, data) -> costfunction(model(unflatten(par)), data), - Optimization.AutoZygote() - ) + optf = Optimization.OptimizationFunction((par, data) -> costfunction(model(unflatten(par)), data), Optimization.AutoZygote()) prob = Optimization.OptimizationProblem(optf, par0, data) sol = Optimization.solve(prob, optimizer; maxiters=iterations) return unflatten(sol.u) From a844da214366df459487a3b701033de1fe5aa5bd Mon Sep 17 00:00:00 2001 From: Simone Carlo Surace Date: Wed, 6 Mar 2024 17:17:10 +0100 Subject: [PATCH 4/5] Run formatter --- docs/make.jl | 26 ++++++++--------- examples/0-mauna-loa/script.jl | 36 ++++++++++++----------- src/EasyGPs.jl | 52 ++++++++++++++++------------------ test/integration_tests.jl | 11 +++---- test/unit_tests.jl | 12 +++++--- 5 files changed, 70 insertions(+), 67 deletions(-) diff --git a/docs/make.jl b/docs/make.jl index 2b3a9a9..4694148 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,6 +1,8 @@ ### Process examples using Pkg -Pkg.add(Pkg.PackageSpec(; url="https://github.com/JuliaGaussianProcesses/JuliaGPsDocs.jl")) # While the package is unregistered, it's a workaround +Pkg.add( + Pkg.PackageSpec(; url = "https://github.com/JuliaGaussianProcesses/JuliaGPsDocs.jl"), +) # While the package is unregistered, it's a workaround using JuliaGPsDocs @@ -18,24 +20,20 @@ DocMeta.setdocmeta!( quote using EasyGPs end; # we have to load all packages used (implicitly) within jldoctest blocks in the API docstrings - recursive=true, + recursive = true, ) makedocs(; - sitename="EasyGPs.jl", - format=Documenter.HTML(; - size_threshold_ignore=[ - "examples/0-mauna-loa/index.md", - ], - ), - modules=[EasyGPs], - pages=[ + sitename = "EasyGPs.jl", + format = Documenter.HTML(; size_threshold_ignore = ["examples/0-mauna-loa/index.md"]), + modules = [EasyGPs], + pages = [ "Home" => "index.md", "Examples" => JuliaGPsDocs.find_generated_examples(EasyGPs), ], - warnonly=true, - checkdocs=:exports, - doctestfilters=JuliaGPsDocs.DOCTEST_FILTERS, + warnonly = true, + checkdocs = :exports, + doctestfilters = JuliaGPsDocs.DOCTEST_FILTERS, ) -deploydocs(; repo="github.com/JuliaGaussianProcesses/EasyGPs.jl.git", push_preview=true) +deploydocs(; repo = "github.com/JuliaGaussianProcesses/EasyGPs.jl.git", push_preview = true) diff --git a/examples/0-mauna-loa/script.jl b/examples/0-mauna-loa/script.jl index f07d7b3..1869b5f 100644 --- a/examples/0-mauna-loa/script.jl +++ b/examples/0-mauna-loa/script.jl @@ -17,7 +17,7 @@ using Plots # visualisation # Let's load and visualize the dataset. (xtrain, ytrain), (xtest, ytest) = let - data = CSV.read(joinpath(@__DIR__, "CO2_data.csv"), Tables.matrix; header=0) + data = CSV.read(joinpath(@__DIR__, "CO2_data.csv"), Tables.matrix; header = 0) year = data[:, 1] co2 = data[:, 2] @@ -29,9 +29,9 @@ using Plots # visualisation end function plotdata() - plot(; xlabel="year", ylabel="CO₂ [ppm]", legend=:bottomright) - scatter!(xtrain, ytrain; label="training data", ms=2, markerstrokewidth=0) - return scatter!(xtest, ytest; label="test data", ms=2, markerstrokewidth=0) + plot(; xlabel = "year", ylabel = "CO₂ [ppm]", legend = :bottomright) + scatter!(xtrain, ytrain; label = "training data", ms = 2, markerstrokewidth = 0) + return scatter!(xtest, ytest; label = "test data", ms = 2, markerstrokewidth = 0) end plotdata() @@ -42,10 +42,12 @@ plotdata() # original tutorial. 
k_smooth_trend = exp(8.0) * with_lengthscale(SEKernel(), exp(4.0))#with_lengthscale(SEKernel(), exp(4.0)) -k_seasonality = exp(2.0) * PeriodicKernel(; r=[0.5]) * - with_lengthscale(SEKernel(), exp(4.0)) -k_medium_term_irregularities = 1.0 * with_lengthscale(RationalQuadraticKernel(; α=exp(-1.0)), 1.0) -k_noise_terms = exp(-4.0) * with_lengthscale(SEKernel(), exp(-2.0)) + exp(-4.0) * WhiteKernel() +k_seasonality = + exp(2.0) * PeriodicKernel(; r = [0.5]) * with_lengthscale(SEKernel(), exp(4.0)) +k_medium_term_irregularities = + 1.0 * with_lengthscale(RationalQuadraticKernel(; α = exp(-1.0)), 1.0) +k_noise_terms = + exp(-4.0) * with_lengthscale(SEKernel(), exp(-2.0)) + exp(-4.0) * WhiteKernel() kernel = k_smooth_trend + k_seasonality + k_medium_term_irregularities + k_noise_terms #md nothing #hide @@ -69,11 +71,11 @@ fpost_init = posterior(gp(xtrain), ytrain) # By setting `ribbon_scale=2` we visualize the uncertainty band with ``\pm 2`` # (instead of the default ``\pm 1``) standard deviations. -plot_gp!(f; label) = plot!(f(1920:0.2:2030); ribbon_scale=2, linewidth=1, label) +plot_gp!(f; label) = plot!(f(1920:0.2:2030); ribbon_scale = 2, linewidth = 1, label) #md nothing #hide plotdata() -plot_gp!(fpost_init; label="posterior f(⋅)") +plot_gp!(fpost_init; label = "posterior f(⋅)") # A reasonable fit to the data, but poor extrapolation away from the observations! @@ -84,11 +86,13 @@ plot_gp!(fpost_init; label="posterior f(⋅)") # We pass an option to choose the exact same optimizer as in the original tutorial. @time fitted_gp = EasyGPs.fit( - gp, xtrain, ytrain; - optimizer=Optim.LBFGS(; - alphaguess=Optim.LineSearches.InitialStatic(; scaled=true), - linesearch=Optim.LineSearches.BackTracking(), - ) + gp, + xtrain, + ytrain; + optimizer = Optim.LBFGS(; + alphaguess = Optim.LineSearches.InitialStatic(; scaled = true), + linesearch = Optim.LineSearches.BackTracking(), + ), ) #md nothing #hide @@ -104,4 +108,4 @@ fpost_opt.prior.kernel # And, finally, we can visualize our optimized posterior GP: plotdata() -plot_gp!(fpost_opt; label="optimized posterior f(⋅)") +plot_gp!(fpost_opt; label = "optimized posterior f(⋅)") diff --git a/src/EasyGPs.jl b/src/EasyGPs.jl index 15b67ea..f44f0dc 100644 --- a/src/EasyGPs.jl +++ b/src/EasyGPs.jl @@ -61,16 +61,14 @@ Takes a callable `model` and returns the optimal parameter, starting with initia `θ0`. In order to work, there needs to be an implementation of `EasyGPs.costfunction` taking two arguments, the first of which is of type `typeof(model(θ0))`. """ -function optimize( - model, θ0, data; - iterations=1000, - optimizer=Optim.BFGS(), - kwargs... -) +function optimize(model, θ0, data; iterations = 1000, optimizer = Optim.BFGS(), kwargs...) 
par0, unflatten = ParameterHandling.flatten(θ0) - optf = Optimization.OptimizationFunction((par, data) -> costfunction(model(unflatten(par)), data), Optimization.AutoZygote()) + optf = Optimization.OptimizationFunction( + (par, data) -> costfunction(model(unflatten(par)), data), + Optimization.AutoZygote(), + ) prob = Optimization.OptimizationProblem(optf, par0, data) - sol = Optimization.solve(prob, optimizer; maxiters=iterations) + sol = Optimization.solve(prob, optimizer; maxiters = iterations) return unflatten(sol.u) end @@ -96,20 +94,19 @@ _isequal(m1::ConstMean, m2::ConstMean) = isapprox(m1.c, m2.c) # Simple kernels -KernelsWithoutParameters = Union{ - SEKernel,Matern32Kernel,Matern52Kernel,WhiteKernel -} +KernelsWithoutParameters = Union{SEKernel,Matern32Kernel,Matern52Kernel,WhiteKernel} extract_parameters(::T) where {T<:KernelsWithoutParameters} = nothing apply_parameters(k::T, θ) where {T<:KernelsWithoutParameters} = k _isequal(k1::T, k2::T) where {T<:KernelsWithoutParameters} = true extract_parameters(k::PeriodicKernel) = ParameterHandling.positive(only(k.r)) -apply_parameters(::PeriodicKernel, θ) = PeriodicKernel(r=[θ]) +apply_parameters(::PeriodicKernel, θ) = PeriodicKernel(r = [θ]) _isequal(k1::T, k2::T) where {T<:PeriodicKernel} = k1.r ≈ k2.r extract_parameters(k::RationalQuadraticKernel) = ParameterHandling.positive(only(k.α)) -apply_parameters(k::RationalQuadraticKernel, θ) = RationalQuadraticKernel(; α=θ, metric=k.metric) +apply_parameters(k::RationalQuadraticKernel, θ) = + RationalQuadraticKernel(; α = θ, metric = k.metric) _isequal(k1::T, k2::T) where {T<:RationalQuadraticKernel} = true @@ -121,7 +118,8 @@ _isequal(k1::KernelSum, k2::KernelSum) = mapreduce(_isequal, &, k1.kernels, k2.k extract_parameters(k::KernelProduct) = map(extract_parameters, k.kernels) apply_parameters(k::KernelProduct, θ) = KernelProduct(map(apply_parameters, k.kernels, θ)) -_isequal(k1::KernelProduct, k2::KernelProduct) = mapreduce(_isequal, &, k1.kernels, k2.kernels) +_isequal(k1::KernelProduct, k2::KernelProduct) = + mapreduce(_isequal, &, k1.kernels, k2.kernels) function extract_parameters(k::TransformedKernel) return (extract_parameters(k.kernel), extract_parameters(k.transform)) @@ -130,7 +128,7 @@ end function apply_parameters(k::TransformedKernel, θ) return TransformedKernel( apply_parameters(k.kernel, θ[1]), - apply_parameters(k.transform, θ[2]) + apply_parameters(k.transform, θ[2]), ) end @@ -143,10 +141,7 @@ function extract_parameters(k::ScaledKernel) end function apply_parameters(k::ScaledKernel, θ) - return ScaledKernel( - apply_parameters(k.kernel, θ[1]), - θ[2] - ) + return ScaledKernel(apply_parameters(k.kernel, θ[1]), θ[2]) end function _isequal(k1::ScaledKernel, k2::ScaledKernel) @@ -175,12 +170,14 @@ _isequal(l1::T, l2::T) where {T<:PoissonLikelihood} = true # GPs extract_parameters(f::GP) = (extract_parameters(f.mean), extract_parameters(f.kernel)) -apply_parameters(f::GP, θ) = GP(apply_parameters(f.mean, θ[1]), apply_parameters(f.kernel, θ[2])) +apply_parameters(f::GP, θ) = + GP(apply_parameters(f.mean, θ[1]), apply_parameters(f.kernel, θ[2])) costfunction(f::GP, data) = -logpdf(f(data.x), data.y) _isequal(f1::GP, f2::GP) = _isequal(f1.mean, f2.mean) && _isequal(f1.kernel, f2.kernel) extract_parameters(f::LatentGP) = (extract_parameters(f.f), extract_parameters(f.lik)) -apply_parameters(f::LatentGP, θ) = LatentGP(apply_parameters(f.f, θ[1]), apply_parameters(f.lik, θ[2]), f.Σy) +apply_parameters(f::LatentGP, θ) = + LatentGP(apply_parameters(f.f, θ[1]), 
apply_parameters(f.lik, θ[2]), f.Σy) @@ -199,7 +196,7 @@ function apply_parameters(sva::SVA, θ) return SVA(fz, q) end -variational_gaussian(n::Int, T=Float64) = MvNormal(zeros(T, n), Matrix{T}(I, n, n)) +variational_gaussian(n::Int, T = Float64) = MvNormal(zeros(T, n), Matrix{T}(I, n, n)) @@ -220,10 +217,12 @@ end with_gaussian_noise(gp::GP, obs_noise::Real) = NoisyGP(gp, obs_noise) -extract_parameters(f::NoisyGP) = (extract_parameters(f.gp), ParameterHandling.positive(f.obs_noise, exp, 1e-6)) +extract_parameters(f::NoisyGP) = + (extract_parameters(f.gp), ParameterHandling.positive(f.obs_noise, exp, 1e-6)) apply_parameters(f::NoisyGP, θ) = NoisyGP(apply_parameters(f.gp, θ[1]), θ[2]) costfunction(f::NoisyGP, data) = -logpdf(f(data.x), data.y) -_isequal(f1::NoisyGP, f2::NoisyGP) = _isequal(f1.gp, f2.gp) && isapprox(f1.obs_noise, f2.obs_noise) +_isequal(f1::NoisyGP, f2::NoisyGP) = + _isequal(f1.gp, f2.gp) && isapprox(f1.obs_noise, f2.obs_noise) struct SVGP{T<:LatentGP,Ts<:SVA} lgp::T @@ -234,10 +233,7 @@ end SVGP(lgp, sva; fixed_inducing_points) = SVGP(lgp, sva, fixed_inducing_points) function extract_parameters(f::SVGP) - return ( - extract_parameters(f.lgp), - extract_parameters(f.sva, f.fixed_inducing_points), - ) + return (extract_parameters(f.lgp), extract_parameters(f.sva, f.fixed_inducing_points)) end function apply_parameters(f::SVGP, θ) diff --git a/test/integration_tests.jl b/test/integration_tests.jl index c348f62..b673da6 100644 --- a/test/integration_tests.jl +++ b/test/integration_tests.jl @@ -1,9 +1,10 @@ @testitem "GP without noise" begin - kernel = 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel() + kernel = + 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel() gp = GP(3.0, kernel) x = 0.01:0.01:1.0 y = rand(gp(x, 0.1)) - fitted_gp = EasyGPs.fit(gp, x, y; iterations=1) + fitted_gp = EasyGPs.fit(gp, x, y; iterations = 1) @test fitted_gp isa typeof(gp) @test !EasyGPs._isequal(fitted_gp, gp) end @@ -13,7 +14,7 @@ end gp = with_gaussian_noise(GP(3.0, kernel), 0.1) x = 0.01:0.01:1.0 y = rand(gp.gp(x, 0.1)) - fitted_gp = EasyGPs.fit(gp, x, y; iterations=1) + fitted_gp = EasyGPs.fit(gp, x, y; iterations = 1) @test fitted_gp isa typeof(gp) @test !EasyGPs._isequal(fitted_gp, gp) end @@ -25,8 +26,8 @@ end y = round.(Int, 10 .* sum.(abs2, x)) z = x[begin:5:end] sva = SVA(lgp(z).fx, variational_gaussian(length(z))) - svgp = SVGP(lgp, sva; fixed_inducing_points=true) - fitted_svgp = EasyGPs.fit(svgp, x, y; iterations=1) + svgp = SVGP(lgp, sva; fixed_inducing_points = true) + fitted_svgp = EasyGPs.fit(svgp, x, y; iterations = 1) @test fitted_svgp isa typeof(svgp) @test !EasyGPs._isequal(fitted_svgp, svgp) @test fitted_svgp.sva.fz.x === z diff --git a/test/unit_tests.jl b/test/unit_tests.jl index fcd0798..e8a6b1a 100644 --- a/test/unit_tests.jl +++ b/test/unit_tests.jl @@ -13,14 +13,18 @@ end @testitem "parameterize" begin import ParameterHandling for object in ( - ZeroMean(), ConstMean(1.0), - SEKernel(), Matern32Kernel(), Matern52Kernel(), + ZeroMean(), + ConstMean(1.0), + SEKernel(), + Matern32Kernel(), + Matern52Kernel(), with_lengthscale(SEKernel(), 2.0), - 2.0 * SEKernel(), 3.0 * SEKernel() + 2.0 * Matern32Kernel(), + 2.0 * SEKernel(), + 3.0 * SEKernel() + 2.0 * Matern32Kernel(), 2.0 * Matern32Kernel() * SEKernel(), 2.0 * with_lengthscale(SEKernel(), 1.0) + 3.0 * Matern32Kernel() * Matern52Kernel(), BernoulliLikelihood(), - PoissonLikelihood() + PoissonLikelihood(), ) model, θ = EasyGPs.parameterize(object) new_object = 
@inferred model(θ) From 2c327fd278c2caaed1467c4ade067f748454c479 Mon Sep 17 00:00:00 2001 From: Simone Carlo Surace Date: Wed, 6 Mar 2024 17:30:23 +0100 Subject: [PATCH 5/5] Add code style badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index e158836..b595f13 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@ [![Docs: dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://JuliaGaussianProcesses.github.io/EasyGPs.jl/dev) [![CI](https://github.com/JuliaGaussianProcesses/EasyGPs.jl/actions/workflows/CI.yml/badge.svg)](https://github.com/JuliaGaussianProcesses/EasyGPs.jl/actions/workflows/CI.yml) [![Codecov](https://codecov.io/gh/JuliaGaussianProcesses/EasyGPs.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/JuliaGaussianProcesses/EasyGPs.jl/tree/master) +[![Code Style: Blue](https://img.shields.io/badge/code%20style-blue-4495d1.svg)](https://github.com/JuliaDiff/BlueStyle) EasyGPs.jl is a package that defines a high-level API for the JuliaGaussianProcesses ecosystem. It handles model parameterization and training, allowing users to focus on the
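
To try locally the same check that the Format.yml workflow from PATCH 1/5 runs in CI, the sketch below collapses its two `julia -e` steps into a single session. It assumes a Julia session started at the repository root; note that JuliaFormatter reads its options from a file named `.JuliaFormatter.toml`, so a `style = "blue"` setting only takes effect under that exact filename.

    # Local equivalent of the two steps in .github/workflows/Format.yml (a sketch,
    # not part of the patches above):
    using Pkg
    Pkg.add("JuliaFormatter")      # same setup step the workflow performs

    using JuliaFormatter

    # format(".") rewrites files in place and returns true when every file was
    # already formatted, so the return value doubles as a pass/fail flag.
    clean = format("."; verbose=true)
    clean || @warn "Some files were reformatted; review the diff before committing."

In CI, any diff this run leaves behind is picked up by reviewdog/action-suggester and posted as inline review suggestions on the pull request, with `fail_on_error: true` marking the check as failed whenever there is something to suggest.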