From ea69430a76d6b9cb3a3b6350229bf335acad1f00 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Wed, 22 Oct 2025 04:32:54 -0400 Subject: [PATCH 01/14] update vi interface to match AdvancedVI@0.5 --- Project.toml | 2 +- src/Turing.jl | 3 + src/variational/VariationalInference.jl | 80 +++++++++++-------------- src/variational/deprecated.jl | 61 ------------------- test/Project.toml | 2 +- test/runtests.jl | 78 ++++++++++++------------ test/variational/advi.jl | 72 ++++++++-------------- 7 files changed, 106 insertions(+), 192 deletions(-) delete mode 100644 src/variational/deprecated.jl diff --git a/Project.toml b/Project.toml index a2e5f206f4..9a8997ce38 100644 --- a/Project.toml +++ b/Project.toml @@ -55,7 +55,7 @@ Accessors = "0.1" AdvancedHMC = "0.3.0, 0.4.0, 0.5.2, 0.6, 0.7, 0.8" AdvancedMH = "0.8" AdvancedPS = "0.7" -AdvancedVI = "0.4" +AdvancedVI = "0.5" BangBang = "0.4.2" Bijectors = "0.14, 0.15" Compat = "4.15.0" diff --git a/src/Turing.jl b/src/Turing.jl index 0cdbe24586..0d29e1397e 100644 --- a/src/Turing.jl +++ b/src/Turing.jl @@ -117,6 +117,9 @@ export q_locationscale, q_meanfield_gaussian, q_fullrank_gaussian, + KLMinRepGradProxDescent, + KLMinRepGradDescent, + KLMinScoreGradDescent, # ADTypes AutoForwardDiff, AutoReverseDiff, diff --git a/src/variational/VariationalInference.jl b/src/variational/VariationalInference.jl index d516319684..1af8a24eb0 100644 --- a/src/variational/VariationalInference.jl +++ b/src/variational/VariationalInference.jl @@ -1,21 +1,24 @@ module Variational -using DynamicPPL +using AdvancedVI: + AdvancedVI, KLMinRepGradDescent, KLMinRepGradProxDescent, KLMinScoreGradDescent using ADTypes +using Bijectors: Bijectors using Distributions +using DynamicPPL using LinearAlgebra using LogDensityProblems using Random +using ..Turing: DEFAULT_ADTYPE, PROGRESS -import ..Turing: DEFAULT_ADTYPE, PROGRESS - -import AdvancedVI -import Bijectors - -export vi, q_locationscale, q_meanfield_gaussian, q_fullrank_gaussian - -include("deprecated.jl") +export vi, + q_locationscale, + q_meanfield_gaussian, + q_fullrank_gaussian, + KLMinRepGradProxDescent, + KLMinRepGradDescent, + KLMinScoreGradDescent """ q_initialize_scale( @@ -248,76 +251,61 @@ end """ vi( [rng::Random.AbstractRNG,] - model::DynamicPPL.Model; + model::DynamicPPL.Model, q, - n_iterations::Int; - objective::AdvancedVI.AbstractVariationalObjective = AdvancedVI.RepGradELBO( - 10; entropy = AdvancedVI.ClosedFormEntropyZeroGradient() - ), + max_iter::Int; + algorithm::AdvancedVI.AbstractVariationalAlgorithm = KLMinRepGradProxDescent(DEFAULT_ADTYPE; n_samples=10), show_progress::Bool = Turing.PROGRESS[], - optimizer::Optimisers.AbstractRule = AdvancedVI.DoWG(), - averager::AdvancedVI.AbstractAverager = AdvancedVI.PolynomialAveraging(), - operator::AdvancedVI.AbstractOperator = AdvancedVI.ProximalLocationScaleEntropy(), - adtype::ADTypes.AbstractADType = Turing.DEFAULT_ADTYPE, kwargs... ) -Approximating the target `model` via variational inference by optimizing `objective` with the initialization `q`. +Approximate the target `model` via the variational inference algorithm `algorithm` by starting from the initial variational approximation `q`. This is a thin wrapper around `AdvancedVI.optimize`. +The default `algorithm` assumes `q` uses `AdvancedVI.MvLocationScale`, which can be constructed by invoking `q_fullrank_gaussian` or `q_meanfield_gaussian`. +For other variational families, refer to `AdvancedVI` to determine the best algorithm and options. # Arguments - `model`: The target `DynamicPPL.Model`. 
- `q`: The initial variational approximation. -- `n_iterations`: Number of optimization steps. +- `max_iter`: Maximum number of steps. # Keyword Arguments -- `objective`: Variational objective to be optimized. +- `algorithm`: Variational inference algorithm. - `show_progress`: Whether to show the progress bar. -- `optimizer`: Optimization algorithm. -- `averager`: Parameter averaging strategy. -- `operator`: Operator applied after each optimization step. -- `adtype`: Automatic differentiation backend. +- `adtype`: Automatic differentiation backend to be applied to the log-density. The default value for `algorithm` also uses this backend for differentiation the variational objective. See the docs of `AdvancedVI.optimize` for additional keyword arguments. # Returns -- `q`: Variational distribution formed by the last iterate of the optimization run. -- `q_avg`: Variational distribution formed by the averaged iterates according to `averager`. -- `state`: Collection of states used for optimization. This can be used to resume from a past call to `vi`. -- `info`: Information generated during the optimization run. +- `q`: Output variational distribution of `algorithm`. +- `state`: Collection of states used by `algorithm`. This can be used to resume from a past call to `vi`. +- `info`: Information generated while executing `algorithm`. """ function vi( rng::Random.AbstractRNG, model::DynamicPPL.Model, q, - n_iterations::Int; - objective=AdvancedVI.RepGradELBO( - 10; entropy=AdvancedVI.ClosedFormEntropyZeroGradient() - ), - show_progress::Bool=PROGRESS[], - optimizer=AdvancedVI.DoWG(), - averager=AdvancedVI.PolynomialAveraging(), - operator=AdvancedVI.ProximalLocationScaleEntropy(), + max_iter::Int, + args...; adtype::ADTypes.AbstractADType=DEFAULT_ADTYPE, + algorithm=KLMinRepGradProxDescent(adtype; n_samples=10), + show_progress::Bool=PROGRESS[], kwargs..., ) return AdvancedVI.optimize( rng, - LogDensityFunction(model), - objective, + algorithm, + max_iter, + LogDensityFunction(model; adtype), q, - n_iterations; + args...; show_progress=show_progress, - adtype, - optimizer, - averager, - operator, kwargs..., ) end -function vi(model::DynamicPPL.Model, q, n_iterations::Int; kwargs...) - return vi(Random.default_rng(), model, q, n_iterations; kwargs...) +function vi(model::DynamicPPL.Model, q, max_iter::Int; kwargs...) + return vi(Random.default_rng(), model, q, max_iter; kwargs...) end end diff --git a/src/variational/deprecated.jl b/src/variational/deprecated.jl deleted file mode 100644 index 9a9f4777b5..0000000000 --- a/src/variational/deprecated.jl +++ /dev/null @@ -1,61 +0,0 @@ - -import DistributionsAD -export ADVI - -Base.@deprecate meanfield(model) q_meanfield_gaussian(model) - -struct ADVI{AD} - "Number of samples used to estimate the ELBO in each optimization step." - samples_per_step::Int - "Maximum number of gradient steps." - max_iters::Int - "AD backend used for automatic differentiation." - adtype::AD -end - -function ADVI( - samples_per_step::Int=1, - max_iters::Int=1000; - adtype::ADTypes.AbstractADType=ADTypes.AutoForwardDiff(), -) - Base.depwarn( - "The type ADVI will be removed in future releases. Please refer to the new interface for `vi`", - :ADVI; - force=true, - ) - return ADVI{typeof(adtype)}(samples_per_step, max_iters, adtype) -end - -function vi(model::DynamicPPL.Model, alg::ADVI; kwargs...) - Base.depwarn( - "This specialization along with the type `ADVI` will be deprecated in future releases. 
Please refer to the new interface for `vi`.", - :vi; - force=true, - ) - q = q_meanfield_gaussian(Random.default_rng(), model) - objective = AdvancedVI.RepGradELBO( - alg.samples_per_step; entropy=AdvancedVI.ClosedFormEntropy() - ) - operator = AdvancedVI.IdentityOperator() - _, q_avg, _, _ = vi(model, q, alg.max_iters; objective, operator, kwargs...) - return q_avg -end - -function vi( - model::DynamicPPL.Model, - alg::ADVI, - q::Bijectors.TransformedDistribution{<:DistributionsAD.TuringDiagMvNormal}; - kwargs..., -) - Base.depwarn( - "This specialization along with the type `ADVI` will be deprecated in future releases. Please refer to the new interface for `vi`.", - :vi; - force=true, - ) - objective = AdvancedVI.RepGradELBO( - alg.samples_per_step; entropy=AdvancedVI.ClosedFormEntropy() - ) - operator = AdvancedVI.IdentityOperator() - _, q_avg, _, _ = vi(model, q, alg.max_iters; objective, operator, kwargs...) - return q_avg -end diff --git a/test/Project.toml b/test/Project.toml index 138b1a1a0d..b03dfd8970 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -44,7 +44,7 @@ AbstractMCMC = "5" AbstractPPL = "0.11, 0.12, 0.13" AdvancedMH = "0.6, 0.7, 0.8" AdvancedPS = "0.7" -AdvancedVI = "0.4" +AdvancedVI = "0.5" Aqua = "0.8" BangBang = "0.4" Bijectors = "0.14, 0.15" diff --git a/test/runtests.jl b/test/runtests.jl index 5fb6b21411..d2535d58a3 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -13,7 +13,7 @@ include("test_utils/models.jl") include("test_utils/numerical_tests.jl") include("test_utils/sampler.jl") -Turing.setprogress!(false) +#Turing.setprogress!(false) included_paths, excluded_paths = parse_args(ARGS) # Filter which tests to run and collect timing and allocations information to show in a @@ -30,55 +30,59 @@ macro timeit_include(path::AbstractString) end @testset "Turing" verbose = true begin - @testset "Aqua" begin - @timeit_include("Aqua.jl") - end + # @testset "Aqua" begin + # @timeit_include("Aqua.jl") + # end - @testset "AD" verbose = true begin - @timeit_include("ad.jl") - end + # @testset "AD" verbose = true begin + # @timeit_include("ad.jl") + # end - @testset "essential" verbose = true begin - @timeit_include("essential/container.jl") - end + # @testset "essential" verbose = true begin + # @timeit_include("essential/container.jl") + # end - @testset "samplers (without AD)" verbose = true begin - @timeit_include("mcmc/particle_mcmc.jl") - @timeit_include("mcmc/emcee.jl") - @timeit_include("mcmc/ess.jl") - @timeit_include("mcmc/is.jl") - end + # @testset "samplers (without AD)" verbose = true begin + # @timeit_include("mcmc/particle_mcmc.jl") + # @timeit_include("mcmc/emcee.jl") + # @timeit_include("mcmc/ess.jl") + # @timeit_include("mcmc/is.jl") + # end @timeit TIMEROUTPUT "inference" begin - @testset "inference with samplers" verbose = true begin - @timeit_include("mcmc/gibbs.jl") - @timeit_include("mcmc/hmc.jl") - @timeit_include("mcmc/Inference.jl") - @timeit_include("mcmc/sghmc.jl") - @timeit_include("mcmc/external_sampler.jl") - @timeit_include("mcmc/mh.jl") - @timeit_include("ext/dynamichmc.jl") - @timeit_include("mcmc/repeat_sampler.jl") - end + # @testset "inference with samplers" verbose = true begin + # @timeit_include("mcmc/gibbs.jl") + # @timeit_include("mcmc/hmc.jl") + # @timeit_include("mcmc/Inference.jl") + # @timeit_include("mcmc/sghmc.jl") + # @timeit_include("mcmc/external_sampler.jl") + # @timeit_include("mcmc/mh.jl") + # @timeit_include("ext/dynamichmc.jl") + # @timeit_include("mcmc/repeat_sampler.jl") + # end @testset "variational 
algorithms" begin @timeit_include("variational/advi.jl") end - @testset "mode estimation" verbose = true begin - @timeit_include("optimisation/Optimisation.jl") - @timeit_include("ext/OptimInterface.jl") - end + # @testset "mode estimation" verbose = true begin + # @timeit_include("optimisation/Optimisation.jl") + # @timeit_include("ext/OptimInterface.jl") + # end end - @testset "stdlib" verbose = true begin - @timeit_include("stdlib/distributions.jl") - @timeit_include("stdlib/RandomMeasures.jl") - end + # @testset "variational optimisers" begin + # @timeit_include("variational/optimisers.jl") + # end - @testset "utilities" begin - @timeit_include("mcmc/utilities.jl") - end + # @testset "stdlib" verbose = true begin + # @timeit_include("stdlib/distributions.jl") + # @timeit_include("stdlib/RandomMeasures.jl") + # end + + # @testset "utilities" begin + # @timeit_include("mcmc/utilities.jl") + # end end show(TIMEROUTPUT; compact=true, sortby=:firstexec) diff --git a/test/variational/advi.jl b/test/variational/advi.jl index ed8f745df2..2ab5d2b424 100644 --- a/test/variational/advi.jl +++ b/test/variational/advi.jl @@ -10,12 +10,16 @@ using Distributions: Dirichlet, Normal using LinearAlgebra using MCMCChains: Chains using Random +using ReverseDiff using StableRNGs: StableRNG using Test: @test, @testset using Turing using Turing.Variational @testset "ADVI" begin + adtype = AutoReverseDiff() + operator = AdvancedVI.ClipScale() + @testset "q initialization" begin m = gdemo_default d = length(Turing.DynamicPPL.VarInfo(m)[:]) @@ -41,86 +45,62 @@ using Turing.Variational @testset "default interface" begin for q0 in [q_meanfield_gaussian(gdemo_default), q_fullrank_gaussian(gdemo_default)] - _, q, _, _ = vi(gdemo_default, q0, 100; show_progress=Turing.PROGRESS[]) + q, _, _ = vi(gdemo_default, q0, 100; show_progress=Turing.PROGRESS[], adtype) c1 = rand(q, 10) end end - @testset "custom interface $name" for (name, objective, operator, optimizer) in [ - ( - "ADVI with closed-form entropy", - AdvancedVI.RepGradELBO(10), - AdvancedVI.ProximalLocationScaleEntropy(), - AdvancedVI.DoG(), - ), + @testset "custom algorithm $name" for (name, algorithm) in [ ( - "ADVI with proximal entropy", - AdvancedVI.RepGradELBO(10; entropy=AdvancedVI.ClosedFormEntropyZeroGradient()), - AdvancedVI.ClipScale(), - AdvancedVI.DoG(), + "KLMinRepGradProxDescent", + KLMinRepGradProxDescent(AutoReverseDiff(); n_samples=10), ), ( - "ADVI with STL entropy", - AdvancedVI.RepGradELBO(10; entropy=AdvancedVI.StickingTheLandingEntropy()), - AdvancedVI.ClipScale(), - AdvancedVI.DoG(), + "KLMinRepGradDescent", + KLMinRepGradDescent(AutoReverseDiff(); operator, n_samples=10), ), ] T = 1000 - q, q_avg, _, _ = vi( + q, _, _ = vi( gdemo_default, q_meanfield_gaussian(gdemo_default), T; - objective, - optimizer, - operator, + algorithm, + adtype, show_progress=Turing.PROGRESS[], ) - N = 1000 - c1 = rand(q_avg, N) c2 = rand(q, N) end - @testset "inference $name" for (name, objective, operator, optimizer) in [ + @testset "inference $name" for (name, algorithm) in [ ( - "ADVI with closed-form entropy", - AdvancedVI.RepGradELBO(10), - AdvancedVI.ProximalLocationScaleEntropy(), - AdvancedVI.DoG(), + "KLMinRepGradProxDescent", + KLMinRepGradProxDescent(AutoReverseDiff(); n_samples=10), ), ( - "ADVI with proximal entropy", - RepGradELBO(10; entropy=AdvancedVI.ClosedFormEntropyZeroGradient()), - AdvancedVI.ClipScale(), - AdvancedVI.DoG(), - ), - ( - "ADVI with STL entropy", - AdvancedVI.RepGradELBO(10; entropy=AdvancedVI.StickingTheLandingEntropy()), - 
AdvancedVI.ClipScale(), - AdvancedVI.DoG(), + "KLMinRepGradDescent", + KLMinRepGradDescent(AutoReverseDiff(); operator, n_samples=10), ), ] rng = StableRNG(0x517e1d9bf89bf94f) T = 1000 - q, q_avg, _, _ = vi( + q, _, _ = vi( rng, gdemo_default, q_meanfield_gaussian(gdemo_default), T; - optimizer, + algorithm, + adtype, show_progress=Turing.PROGRESS[], ) N = 1000 - for q_out in [q_avg, q] - samples = transpose(rand(rng, q_out, N)) - chn = Chains(reshape(samples, size(samples)..., 1), ["s", "m"]) + samples = transpose(rand(rng, q, N)) + chn = Chains(reshape(samples, size(samples)..., 1), ["s", "m"]) - check_gdemo(chn; atol=0.5) - end + check_gdemo(chn; atol=0.5) end # regression test for: @@ -143,7 +123,7 @@ using Turing.Variational @test all(x0 .≈ x0_inv) # And regression for https://github.com/TuringLang/Turing.jl/issues/2160. - _, q, _, _ = vi(rng, m, q_meanfield_gaussian(m), 1000) + q, _, _ = vi(rng, m, q_meanfield_gaussian(m), 1000; adtype) x = rand(rng, q, 1000) @test mean(eachcol(x)) ≈ [0.5, 0.5] atol = 0.1 end @@ -158,7 +138,7 @@ using Turing.Variational end model = demo_issue2205() | (y=1.0,) - _, q, _, _ = vi(rng, model, q_meanfield_gaussian(model), 1000) + q, _, _ = vi(rng, model, q_meanfield_gaussian(model), 1000; adtype) # True mean. mean_true = 1 / 2 var_true = 1 / 2 From 86ee6dd5e1ec64f39e451794375a89122905b7d3 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Wed, 22 Oct 2025 04:34:51 -0400 Subject: [PATCH 02/14] revert unintended commit of `runtests.jl` --- test/runtests.jl | 78 +++++++++++++++++++++++------------------------- 1 file changed, 37 insertions(+), 41 deletions(-) diff --git a/test/runtests.jl b/test/runtests.jl index d2535d58a3..5fb6b21411 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -13,7 +13,7 @@ include("test_utils/models.jl") include("test_utils/numerical_tests.jl") include("test_utils/sampler.jl") -#Turing.setprogress!(false) +Turing.setprogress!(false) included_paths, excluded_paths = parse_args(ARGS) # Filter which tests to run and collect timing and allocations information to show in a @@ -30,59 +30,55 @@ macro timeit_include(path::AbstractString) end @testset "Turing" verbose = true begin - # @testset "Aqua" begin - # @timeit_include("Aqua.jl") - # end + @testset "Aqua" begin + @timeit_include("Aqua.jl") + end - # @testset "AD" verbose = true begin - # @timeit_include("ad.jl") - # end + @testset "AD" verbose = true begin + @timeit_include("ad.jl") + end - # @testset "essential" verbose = true begin - # @timeit_include("essential/container.jl") - # end + @testset "essential" verbose = true begin + @timeit_include("essential/container.jl") + end - # @testset "samplers (without AD)" verbose = true begin - # @timeit_include("mcmc/particle_mcmc.jl") - # @timeit_include("mcmc/emcee.jl") - # @timeit_include("mcmc/ess.jl") - # @timeit_include("mcmc/is.jl") - # end + @testset "samplers (without AD)" verbose = true begin + @timeit_include("mcmc/particle_mcmc.jl") + @timeit_include("mcmc/emcee.jl") + @timeit_include("mcmc/ess.jl") + @timeit_include("mcmc/is.jl") + end @timeit TIMEROUTPUT "inference" begin - # @testset "inference with samplers" verbose = true begin - # @timeit_include("mcmc/gibbs.jl") - # @timeit_include("mcmc/hmc.jl") - # @timeit_include("mcmc/Inference.jl") - # @timeit_include("mcmc/sghmc.jl") - # @timeit_include("mcmc/external_sampler.jl") - # @timeit_include("mcmc/mh.jl") - # @timeit_include("ext/dynamichmc.jl") - # @timeit_include("mcmc/repeat_sampler.jl") - # end + @testset "inference with samplers" verbose = true begin + 
@timeit_include("mcmc/gibbs.jl") + @timeit_include("mcmc/hmc.jl") + @timeit_include("mcmc/Inference.jl") + @timeit_include("mcmc/sghmc.jl") + @timeit_include("mcmc/external_sampler.jl") + @timeit_include("mcmc/mh.jl") + @timeit_include("ext/dynamichmc.jl") + @timeit_include("mcmc/repeat_sampler.jl") + end @testset "variational algorithms" begin @timeit_include("variational/advi.jl") end - # @testset "mode estimation" verbose = true begin - # @timeit_include("optimisation/Optimisation.jl") - # @timeit_include("ext/OptimInterface.jl") - # end + @testset "mode estimation" verbose = true begin + @timeit_include("optimisation/Optimisation.jl") + @timeit_include("ext/OptimInterface.jl") + end end - # @testset "variational optimisers" begin - # @timeit_include("variational/optimisers.jl") - # end - - # @testset "stdlib" verbose = true begin - # @timeit_include("stdlib/distributions.jl") - # @timeit_include("stdlib/RandomMeasures.jl") - # end + @testset "stdlib" verbose = true begin + @timeit_include("stdlib/distributions.jl") + @timeit_include("stdlib/RandomMeasures.jl") + end - # @testset "utilities" begin - # @timeit_include("mcmc/utilities.jl") - # end + @testset "utilities" begin + @timeit_include("mcmc/utilities.jl") + end end show(TIMEROUTPUT; compact=true, sortby=:firstexec) From 5ceb7e6f88edf412f38bcabc9a6b1890698ea22d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 23 Oct 2025 20:56:23 +0100 Subject: [PATCH 03/14] CompatHelper: add new compat entry for DynamicPPL at version 0.38 for package test, (keep existing compat) (#2701) Co-authored-by: CompatHelper Julia --- test/Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/test/Project.toml b/test/Project.toml index 435f8cc5f2..2b5b124b59 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -53,6 +53,7 @@ Combinatorics = "1" Distributions = "0.25" DistributionsAD = "0.6.3" DynamicHMC = "2.1.6, 3.0" +DynamicPPL = "0.38" FiniteDifferences = "0.10.8, 0.11, 0.12" ForwardDiff = "0.10.12 - 0.10.32, 0.10, 1" HypothesisTests = "0.11" From d87004562aad8881eed0abe653a7d55ab85a5105 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:08:18 -0400 Subject: [PATCH 04/14] update docs for `vi` --- src/variational/VariationalInference.jl | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/src/variational/VariationalInference.jl b/src/variational/VariationalInference.jl index 1af8a24eb0..630d3b62f2 100644 --- a/src/variational/VariationalInference.jl +++ b/src/variational/VariationalInference.jl @@ -254,14 +254,17 @@ end model::DynamicPPL.Model, q, max_iter::Int; - algorithm::AdvancedVI.AbstractVariationalAlgorithm = KLMinRepGradProxDescent(DEFAULT_ADTYPE; n_samples=10), + adtype::ADTypes.AbstractADType=DEFAULT_ADTYPE, + algorithm::AdvancedVI.AbstractVariationalAlgorithm = KLMinRepGradProxDescent( + adtype; n_samples=10 + ), show_progress::Bool = Turing.PROGRESS[], kwargs... ) Approximate the target `model` via the variational inference algorithm `algorithm` by starting from the initial variational approximation `q`. This is a thin wrapper around `AdvancedVI.optimize`. -The default `algorithm` assumes `q` uses `AdvancedVI.MvLocationScale`, which can be constructed by invoking `q_fullrank_gaussian` or `q_meanfield_gaussian`. 
+The default `algorithm`, `KLMinRepGradProxDescent` ([relevant docs](https://turinglang.org/AdvancedVI.jl/dev/klminrepgradproxdescent/)), assumes `q` uses `AdvancedVI.MvLocationScale`, which can be constructed by invoking `q_fullrank_gaussian` or `q_meanfield_gaussian`. For other variational families, refer to `AdvancedVI` to determine the best algorithm and options. # Arguments @@ -270,9 +273,9 @@ For other variational families, refer to `AdvancedVI` to determine the best algo - `max_iter`: Maximum number of steps. # Keyword Arguments +- `adtype`: Automatic differentiation backend to be applied to the log-density. The default value for `algorithm` also uses this backend for differentiation the variational objective. - `algorithm`: Variational inference algorithm. - `show_progress`: Whether to show the progress bar. -- `adtype`: Automatic differentiation backend to be applied to the log-density. The default value for `algorithm` also uses this backend for differentiation the variational objective. See the docs of `AdvancedVI.optimize` for additional keyword arguments. @@ -288,7 +291,9 @@ function vi( max_iter::Int, args...; adtype::ADTypes.AbstractADType=DEFAULT_ADTYPE, - algorithm=KLMinRepGradProxDescent(adtype; n_samples=10), + algorithm::AdvancedVI.AbstractVariationalAlgorithm=KLMinRepGradProxDescent( + adtype; n_samples=10 + ), show_progress::Bool=PROGRESS[], kwargs..., ) From 2d928e0a5e731cfa54eb6a02e09a3401bc5f5ff2 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:17:04 -0400 Subject: [PATCH 05/14] add history entry for `AdvancedVI@0.5` --- HISTORY.md | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/HISTORY.md b/HISTORY.md index dc66f1f496..83686b257b 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,5 +1,19 @@ + # 0.42.0 +## Breaking Changes + +**AdvancedVI 0.5** + +Turing.jl v0.42 updates `AdvancedVI.jl` compatibility to 0.5. +Most of the changes introduced in `AdvancedVI.jl@0.5` are structural, with some changes spilling out into the interface. +The summary of the changes below are the things that affect the end-users of Turing. +For a more comprehensive list of changes, please refer to the [changelogs](https://github.com/TuringLang/AdvancedVI.jl/blob/main/HISTORY.md) in `AdvancedVI`. + +- A new level of interface for defining different variational algorithms have been introduced in `AdvancedVI` v0.5. As a result, the method `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs. +- The default hyperparameters of `DoG`and `DoWG` have been altered. +- The depricated `AdvancedVI@0.2`-era interface is now removed. + # 0.41.0 ## DynamicPPL 0.38 @@ -62,7 +76,7 @@ Note that if the initial sample is included, the corresponding sampler statistic Due to a technical limitation of MCMCChains.jl, this causes all indexing into MCMCChains to return `Union{Float64, Missing}` or similar. If you want the old behaviour, you can discard the first sample (e.g. using `discard_initial=1`). -# 0.40.5 +# 0.4# 0.40.5 Bump Optimization.jl compatibility to include v5. 
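The history entry above notes the removal of the `AdvancedVI@0.2`-era `ADVI` entry point (deleted from `src/variational/deprecated.jl` in the first commit of this series). As a rough migration sketch, not part of the patch itself: the call below approximates the removed `vi(model, ADVI(10, 1000))` path under the new interface, following the updated test suite's use of `ClipScale`; the toy `coinflip` model is invented for illustration, and the optimizer/averager defaults of the old and new paths differ.

```julia
using Turing
import AdvancedVI

# Illustrative toy model (not from the patch).
@model function coinflip(y)
    p ~ Beta(1, 1)
    for i in eachindex(y)
        y[i] ~ Bernoulli(p)
    end
end

model = coinflip([1, 0, 1, 1])

# Removed interface: q = vi(model, ADVI(10, 1000))
# New interface: the samples-per-step setting lives in the algorithm object,
# and the iteration budget becomes the positional `max_iter` argument.
q, _, _ = vi(
    model,
    q_meanfield_gaussian(model),
    1000;
    algorithm=KLMinRepGradDescent(
        AutoForwardDiff(); n_samples=10, operator=AdvancedVI.ClipScale()
    ),
)
```

The iterate averaging that the old path applied implicitly (it returned `q_avg`) now lives inside the algorithm object's `averager` field, per the history entry above.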
From 5211b37286302a2f850fa19f025cd3dbf3cd0f2a Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:18:40 -0400 Subject: [PATCH 06/14] remove export for removed symbol --- src/Turing.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/Turing.jl b/src/Turing.jl index 98cfcf29c7..a4f40df259 100644 --- a/src/Turing.jl +++ b/src/Turing.jl @@ -116,7 +116,6 @@ export externalsampler, # Variational inference - AdvancedVI vi, - ADVI, q_locationscale, q_meanfield_gaussian, q_fullrank_gaussian, From f0d615d92ddc6763a4ea38b481b1da04e0cd8e97 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:24:13 -0400 Subject: [PATCH 07/14] fix formatting Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- HISTORY.md | 1 - 1 file changed, 1 deletion(-) diff --git a/HISTORY.md b/HISTORY.md index 83686b257b..6e0a620ba0 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -1,4 +1,3 @@ - # 0.42.0 ## Breaking Changes From 1b2351f2501a6ae143c100ffd7169c6a60ced58f Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:24:21 -0400 Subject: [PATCH 08/14] fix formatting Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- HISTORY.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/HISTORY.md b/HISTORY.md index 6e0a620ba0..3327481194 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -9,9 +9,9 @@ Most of the changes introduced in `AdvancedVI.jl@0.5` are structural, with some The summary of the changes below are the things that affect the end-users of Turing. For a more comprehensive list of changes, please refer to the [changelogs](https://github.com/TuringLang/AdvancedVI.jl/blob/main/HISTORY.md) in `AdvancedVI`. -- A new level of interface for defining different variational algorithms have been introduced in `AdvancedVI` v0.5. As a result, the method `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs. -- The default hyperparameters of `DoG`and `DoWG` have been altered. -- The depricated `AdvancedVI@0.2`-era interface is now removed. + - A new level of interface for defining different variational algorithms have been introduced in `AdvancedVI` v0.5. As a result, the method `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs. + - The default hyperparameters of `DoG`and `DoWG` have been altered. + - The depricated `AdvancedVI@0.2`-era interface is now removed. 
# 0.41.0 From 2be31b4e69a7c485c269b6a055ee007f955ba1a9 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:27:57 -0400 Subject: [PATCH 09/14] tidy tests advi --- test/variational/advi.jl | 20 ++++---------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/test/variational/advi.jl b/test/variational/advi.jl index 2ab5d2b424..b426f0e6a3 100644 --- a/test/variational/advi.jl +++ b/test/variational/advi.jl @@ -51,14 +51,8 @@ using Turing.Variational end @testset "custom algorithm $name" for (name, algorithm) in [ - ( - "KLMinRepGradProxDescent", - KLMinRepGradProxDescent(AutoReverseDiff(); n_samples=10), - ), - ( - "KLMinRepGradDescent", - KLMinRepGradDescent(AutoReverseDiff(); operator, n_samples=10), - ), + ("KLMinRepGradProxDescent", KLMinRepGradProxDescent(adtype; n_samples=10)), + ("KLMinRepGradDescent", KLMinRepGradDescent(adtype; operator, n_samples=10)), ] T = 1000 q, _, _ = vi( @@ -74,14 +68,8 @@ using Turing.Variational end @testset "inference $name" for (name, algorithm) in [ - ( - "KLMinRepGradProxDescent", - KLMinRepGradProxDescent(AutoReverseDiff(); n_samples=10), - ), - ( - "KLMinRepGradDescent", - KLMinRepGradDescent(AutoReverseDiff(); operator, n_samples=10), - ), + ("KLMinRepGradProxDescent", KLMinRepGradProxDescent(adtype; n_samples=10)), + ("KLMinRepGradDescent", KLMinRepGradDescent(adtype; operator, n_samples=10)), ] rng = StableRNG(0x517e1d9bf89bf94f) From e48ae42e38ab3f966304283cbadabac0c98a84df Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 00:29:12 -0400 Subject: [PATCH 10/14] fix rename file `advi.jl` to `vi.jl` to reflect naming changes --- test/variational/{advi.jl => vi.jl} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename test/variational/{advi.jl => vi.jl} (100%) diff --git a/test/variational/advi.jl b/test/variational/vi.jl similarity index 100% rename from test/variational/advi.jl rename to test/variational/vi.jl From 44f776255b01b9c1d050a8e3d65fcb3502e21807 Mon Sep 17 00:00:00 2001 From: Kyurae Kim Date: Fri, 24 Oct 2025 22:38:35 -0400 Subject: [PATCH 11/14] fix docs Co-authored-by: Markus Hauru --- HISTORY.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/HISTORY.md b/HISTORY.md index 3327481194..cd796e8ec7 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -9,7 +9,7 @@ Most of the changes introduced in `AdvancedVI.jl@0.5` are structural, with some The summary of the changes below are the things that affect the end-users of Turing. For a more comprehensive list of changes, please refer to the [changelogs](https://github.com/TuringLang/AdvancedVI.jl/blob/main/HISTORY.md) in `AdvancedVI`. - - A new level of interface for defining different variational algorithms have been introduced in `AdvancedVI` v0.5. As a result, the method `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs. + - A new level of interface for defining different variational algorithms has been introduced in `AdvancedVI` v0.5. As a result, the function `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. 
Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs.
  - The default hyperparameters of `DoG`and `DoWG` have been altered.
  - The depricated `AdvancedVI@0.2`-era interface is now removed.

From fd0e9286140775b441b918c9a44599c575718c80 Mon Sep 17 00:00:00 2001
From: Kyurae Kim
Date: Fri, 24 Oct 2025 22:38:51 -0400
Subject: [PATCH 12/14] fix HISTORY.md

Co-authored-by: Markus Hauru
---
 HISTORY.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/HISTORY.md b/HISTORY.md
index 341814e6bf..2f75200af3 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -75,7 +75,7 @@ Note that if the initial sample is included, the corresponding sampler statistic
 Due to a technical limitation of MCMCChains.jl, this causes all indexing into MCMCChains to return `Union{Float64, Missing}` or similar.
 If you want the old behaviour, you can discard the first sample (e.g. using `discard_initial=1`).
 
-# 0.4# 0.40.5
+# 0.40.5
 
 Bump Optimization.jl compatibility to include v5.

From 77276bdc6c048fba4be1343ca745361c9d20145a Mon Sep 17 00:00:00 2001
From: Kyurae Kim
Date: Fri, 24 Oct 2025 22:39:03 -0400
Subject: [PATCH 13/14] fix HISTORY.md

Co-authored-by: Markus Hauru
---
 HISTORY.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/HISTORY.md b/HISTORY.md
index 341814e6bf..2f75200af3 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -11,7 +11,7 @@ For a more comprehensive list of changes, please refer to the [changelogs](https
 
   - A new level of interface for defining different variational algorithms has been introduced in `AdvancedVI` v0.5. As a result, the function `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs.
   - The default hyperparameters of `DoG`and `DoWG` have been altered.
-  - The depricated `AdvancedVI@0.2`-era interface is now removed.
+  - The deprecated `AdvancedVI@0.2`-era interface is now removed.

From e70ddb4fc8c7a884440a6622524a526dcf73c8bd Mon Sep 17 00:00:00 2001
From: Kyurae Kim
Date: Fri, 24 Oct 2025 22:46:11 -0400
Subject: [PATCH 14/14] update history

---
 HISTORY.md | 39 ++++++++++++++++++++++++++++++++++++++-
 1 file changed, 38 insertions(+), 1 deletion(-)

diff --git a/HISTORY.md b/HISTORY.md
index 3327481194..ba92be1ffd 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -9,7 +9,44 @@ Most of the changes introduced in `AdvancedVI.jl@0.5` are structural, with some
 The summary of the changes below are the things that affect the end-users of Turing.
 For a more comprehensive list of changes, please refer to the [changelogs](https://github.com/TuringLang/AdvancedVI.jl/blob/main/HISTORY.md) in `AdvancedVI`.
 
-  - A new level of interface for defining different variational algorithms has been introduced in `AdvancedVI` v0.5. As a result, the function `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations. Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs.
+A new level of interface for defining different variational algorithms has been introduced in `AdvancedVI` v0.5. As a result, the function `Turing.vi` now receives a keyword argument `algorithm`. The object `algorithm <: AdvancedVI.AbstractVariationalAlgorithm` should now contain all the algorithm-specific configurations.
Therefore, keyword arguments of `vi` that were algorithm-specific such as `objective`, `operator`, `averager` and so on, have been moved as fields of the relevant `<: AdvancedVI.AbstractVariationalAlgorithm` structs.
+For example,
+
+```julia
+vi(model, q, n_iters; objective=RepGradELBO(10), operator=AdvancedVI.ClipScale())
+```
+
+is now
+
+```julia
+vi(
+    model,
+    q,
+    n_iters;
+    algorithm=KLMinRepGradDescent(adtype; n_samples=10, operator=AdvancedVI.ClipScale()),
+)
+```
+
+Similarly,
+
+```julia
+vi(
+    model,
+    q,
+    n_iters;
+    objective=RepGradELBO(10; entropy=AdvancedVI.ClosedFormEntropyZeroGradient()),
+    operator=AdvancedVI.ProximalLocationScaleEntropy(),
+)
+```
+
+is now
+
+```julia
+vi(model, q, n_iters; algorithm=KLMinRepGradProxDescent(adtype; n_samples=10))
+```
+
+Additionally,
+
  - The default hyperparameters of `DoG`and `DoWG` have been altered.
  - The deprecated `AdvancedVI@0.2`-era interface is now removed.
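To complement the before/after snippets in this final history entry, here is a minimal end-to-end sketch of the post-update workflow, assembled from the updated docstring and tests; the `gdemo`-style model, data, iteration count, and AD backend are illustrative assumptions rather than values taken from the patches.

```julia
using Turing

# Toy normal model with unknown variance and mean (illustrative).
@model function gdemo(x)
    s² ~ InverseGamma(2, 3)
    m ~ Normal(0, sqrt(s²))
    for i in eachindex(x)
        x[i] ~ Normal(m, sqrt(s²))
    end
end

model = gdemo([1.5, 2.0])

# Initial variational approximation over the model's parameters.
q0 = q_meanfield_gaussian(model)

# Omitting `algorithm` falls back to the default named in the new signature,
# KLMinRepGradProxDescent(adtype; n_samples=10); `vi` now returns (q, state, info).
q, state, info = vi(model, q0, 1000; adtype=AutoForwardDiff())

# Draw samples from the fitted approximation.
samples = rand(q, 1000)
```

Per the updated docstring, the returned `state` can be passed back to resume a previous call to `vi`.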