Update Examples #905
Merged: 4 commits, Sep 26, 2020
examples/ApproximateConvolution.jl (13 additions, 26 deletions)
@@ -1,19 +1,15 @@
## Include needed packages
-using IncrementalInference
-using RoMEPlotting
+using IncrementalInference, KernelDensityEstimate, Distributions
+using Gadfly # for draw PDF
using Test

# import getSample so it can be extended for the user factor MultiModalConditional
import IncrementalInference: getSample

-# switch off y ticks
-toggleYTicks()

## create a new factor type MultiModalConditional
mutable struct MultiModalConditional <: AbstractRelativeFactor
  x::Vector{Distribution}
  hypo::Categorical
-  MultiModalConditional{D <: Distribution}(x::Vector{D}, p::Categorical) = new(x, p)
+  MultiModalConditional(x::Vector{<:Distribution}, p::Categorical) = new(x, p)
end
function getSample(dpl::MultiModalConditional, N::Int=1)
  d = length(dpl.hypo.p)
@@ -26,6 +22,7 @@ function getSample(dpl::MultiModalConditional, N::Int=1)
end

function (dp::MultiModalConditional)(res::AbstractVector{<:Real},
+                                    userdata::FactorMetadata,
                                     idx::Int,
                                     meas::Tuple{<:AbstractArray{<:Real,2},<:AbstractVector{Int64}},
                                     x1::AbstractArray{<:Real},
@@ -36,42 +33,32 @@ function (dp::MultiModalConditional)(res::AbstractVector{<:Real},
end





## build factor graph and populate
fg = initfg()

N=100

-doors = [-20.0, 0.0, 20.0]'
+doors = [-20.0 0.0 20.0]
pd = kde!(doors,[2.0])
pd = resample(pd,N);
bws = getBW(pd)[:,1]
doors2 = getPoints(pd);
-v1 = addVariable!(fg,:x1,doors,N=N)
-f1 = addFactor!(fg,[v1],Obsv2( doors2, bws', [1.0])) #, samplefnc=getSample
+v1 = addVariable!(fg,:x1,ContinuousScalar,N=N)
+f1 = addFactor!(fg,[v1],Prior(pd)) #, samplefnc=getSample

# not initialized
-v2 = addVariable!(fg,:x2, N=N)
+v2 = addVariable!(fg,:x2, ContinuousScalar, N=N)

mmc = MultiModalConditional([Normal(-5,0.5),Normal(5,0.5)],Categorical([0.5,0.5]))
f2 = addFactor!(fg, [:x1; :x2], mmc )


# Graphs.plot(fg.g)


pts = approxConv(fg, :x1x2f1, :x2)


## do some plotting
q2 = kde!(getSample(mmc,2000)[1])
h1 = plotKDE([getBelief(v1), q2],c=["red";"green"],fill=true, xlbl="")
h2 = plotKDE(kde!(pts),fill=true,xlbl="", title="N = 100")



draw(PDF("approxconv.pdf",14cm,10cm),vstack(h1,h2))
# @async run(`evince approxconv.pdf`)

#
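As a usage note on the updated API in this example: the Prior plus approxConv pattern reduces to a few self-contained lines. A minimal sketch, assuming ContinuousScalar variables, the LinearConditional relative factor from this release, and the auto-generated factor label :x1x2f1 (addFactor! names factors from the variable order):

using IncrementalInference, Distributions

fg = initfg()
addVariable!(fg, :x1, ContinuousScalar)
addVariable!(fg, :x2, ContinuousScalar)
addFactor!(fg, [:x1], Prior(Normal(0.0, 1.0)))                    # unary prior belief on :x1
addFactor!(fg, [:x1; :x2], LinearConditional(Normal(10.0, 1.0))) # relative factor between :x1 and :x2
# propagate the belief on :x1 through the relative factor onto :x2
pts = approxConv(fg, :x1x2f1, :x2)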
examples/BayesTreeIllustration.jl (12 additions, 10 deletions)
@@ -4,7 +4,7 @@

# the multimodal iSAM library
using IncrementalInference
-
+using RoMEPlotting
# build some factor graph
fg = initfg()
addVariable!(fg, :x0, ContinuousScalar)
@@ -17,37 +17,39 @@ addFactor!(fg, [:x1, :x2], mmo)


# show the factor graph
-writeGraphPdf(fg, show=true)
+drawGraph(fg, show=true)
# show the tree
tree = wipeBuildNewTree!(fg, drawpdf=true, show=true)


# solve the factor graph and show solving progress on tree in src/JunctionTree.jl
-tree = batchSolve!(fg, drawpdf=true, show=true)
+fg.solverParams.showtree = true
+fg.solverParams.drawtree = true
+tree, smt, hist = solveTree!(fg)


## building a new tree -- as per IIF.prepBatchTree(...)

-IIF.resetFactorGraphNewTree!(fg)
+resetFactorGraphNewTree!(fg)

# Look at variable ordering used to build the Bayes net/tree
-p = IIF.getEliminationOrder(fg, ordering=:qr)
+p = getEliminationOrder(fg, ordering=:qr)


fge = deepcopy(fg)

# Building Bayes net.
-IIF.buildBayesNet!(fge, p)
+buildBayesNet!(fge, p)

# prep and build tree
tree = emptyBayesTree()
-IIF.buildTree!(tree, fge, p)
+buildTree!(tree, fge, p)

# Find potential functions for each clique
cliq = tree.cliques[1] # start at the root
-IIF.buildCliquePotentials(fg, tree, cliq);
+buildCliquePotentials(fg, tree, cliq);

-IIF.drawTree(tree, show=true)
+drawTree(tree, show=true)

# println("Bayes Net")
# sleep(0.1)
@@ -60,6 +62,6 @@ IIF.drawTree(tree, show=true)

-cliq = tree.cliques[1]
+cliq = getClique(tree, :x0) # where :x0 is a frontal variable
-spyCliqMat()
+spyCliqMat(cliq)

tree = drawTree(tree, show=true, imgs=true)
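A usage note on the new solve flow above: the tree drawing switches moved off the solve call and onto the graph's solver parameters. A minimal sketch under that assumption:

using IncrementalInference, Distributions

fg = initfg()
addVariable!(fg, :x0, ContinuousScalar)
addVariable!(fg, :x1, ContinuousScalar)
addFactor!(fg, [:x0], Prior(Normal(0.0, 1.0)))
addFactor!(fg, [:x0; :x1], LinearConditional(Normal(10.0, 1.0)))

fg.solverParams.drawtree = true   # write the Bayes tree to PDF during the solve
fg.solverParams.showtree = true   # also display the tree as it updates
tree, smt, hist = solveTree!(fg)  # same return signature as used in the example above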
examples/IllustrateAutoInit.jl (2 additions, 34 deletions)
@@ -2,36 +2,6 @@

using IncrementalInference

-import IncrementalInference: getSample
-
-## MARKER START is a required block of code, but can be reviewed at the end of the tutorial,
-# Run this but skip past these user defined functions for a quicker introduction ======================
-
-# a bi-modal conditional function
-
-## TODO: NEW STANDARD FEATURE, USE addFactor!(fg, .., multihypo=[...]) or MixtureLinearConditional
-# struct MixtureConditional <: AbstractRelativeFactor
-#   z::Vector{Distribution}
-#   c::Categorical
-# end
-# getSample(s::MixtureConditional, N::Int=1) = (rand.(s.z, N)..., rand(s.c, N))
-# function (s::MixtureConditional)(res::Array{<:Real},
-#                                  userdata::FactorMetadata,
-#                                  idx::Int,
-#                                  meas::Tuple,
-#                                  X1::Array{<:Real,2},
-#                                  X2::Array{<:Real,2})
-#   res[1] = meas[meas[end][idx]][idx] - (X2[1,idx] - X1[1,idx])
-#   nothing
-# end
-
-## Define a model with these user defined functions ===============================================
-## MARKER END



# Start with an empty graph
fg = initfg()

@@ -87,7 +57,7 @@ plotKDE(fg, [:x0, :x1])
# add another node, but introduce more general beliefs
addVariable!(fg, :x2, ContinuousScalar)

-mmo = MixtureConditional([Rayleigh(3); Uniform(30,55)], Categorical([0.4; 0.6]))
+mmo = MixtureLinearConditional([Rayleigh(3); Uniform(30,55)], Categorical([0.4; 0.6]))
addFactor!(fg, [:x1, :x2], mmo)

# Graphs.plot(fg.g)
@@ -123,9 +93,7 @@ plotKDE(fg, [:x0, :x1, :x2, :x3])

# Find global best likelihood solution (posterior belief)
# After defining the problem, we can find the 'minimum free energy' solution
-tree = wipeBuildNewTree!(fg)
-inferOverTree!(fg, tree)
+tree, smt, hist = solveTree!(fg)

# and look at the posterior belief, and notice which consensus modes stand out in the posterior
plotKDE(fg, [:x0, :x1, :x2, :x3])
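The TODO removed above points to the multihypo= keyword on addFactor! as the standard replacement for a hand-rolled mixture conditional. A hedged sketch continuing from the graph in this example; the candidate variables :x2a and :x2b are hypothetical, and the weights read as: the first variable is certain (1.0) while the association splits 50/50 over the two candidates:

addVariable!(fg, :x2a, ContinuousScalar)
addVariable!(fg, :x2b, ContinuousScalar)
addFactor!(fg, [:x1; :x2a; :x2b], LinearConditional(Normal(10.0, 1.0)),
           multihypo=[1.0; 0.5; 0.5])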
examples/TreeAnalysis.jl (1 addition, 1 deletion)
@@ -58,7 +58,7 @@ amd_tree_cost02 = getTreeCost_02(amd_tree)

# Get CCOLAMD variable ordering. First bring in CCOLAMD.
include(normpath(Base.find_package("IncrementalInference"), "..", "ccolamd.jl"))
-A, varsym, fctsym = getAdjacencyMatrixSparse(fg)
+A, varsym, fctsym = getBiadjacencyMatrix(fg)
colamd_ordering = varsym[Ccolamd.ccolamd(A)]
colamd_tree = resetBuildTreeFromOrder!(deepcopy(fg), colamd_ordering)
colamd_tree_nnz = nnzTree(colamd_tree)
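For a quick sanity check of the renamed accessor: getBiadjacencyMatrix returns the sparse variable-to-factor structure together with the symbol lists that the CCOLAMD ordering above indexes into. A sketch, assuming the same return order used in this file:

A, varsym, fctsym = getBiadjacencyMatrix(fg)
@show size(A)                          # sparse biadjacency pattern of the graph
@show length(varsym), length(fctsym)   # symbol lists aligned with the matrix axes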