diff --git a/.github/workflows/Tests.yml b/.github/workflows/Tests.yml index 9416cb68a..7ef2ad36f 100644 --- a/.github/workflows/Tests.yml +++ b/.github/workflows/Tests.yml @@ -8,25 +8,35 @@ on: jobs: test: + # Use matrix.test.name here to avoid it taking up the entire window width + name: test ${{matrix.test.name}} (${{ matrix.os }}, ${{ matrix.version }}, ${{ matrix.arch }}, ${{ matrix.num_threads }}) runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.version == 'nightly' }} strategy: fail-fast: false matrix: - test-args: + test: # Run some of the slower test files individually. The last one catches everything # not included in the others. - - "essential/ad.jl" - - "mcmc/gibbs.jl" - - "mcmc/hmc.jl" - - "mcmc/abstractmcmc.jl" - - "mcmc/Inference.jl" - - "experimental/gibbs.jl" - - "mcmc/ess.jl" - - "--skip essential/ad.jl mcmc/gibbs.jl mcmc/hmc.jl mcmc/abstractmcmc.jl mcmc/Inference.jl experimental/gibbs.jl mcmc/ess.jl" + - name: "essential/ad" + args: "essential/ad.jl" + - name: "mcmc/gibbs" + args: "mcmc/gibbs.jl" + - name: "mcmc/hmc" + args: "mcmc/hmc.jl" + - name: "mcmc/abstractmcmc" + args: "mcmc/abstractmcmc.jl" + - name: "mcmc/Inference" + args: "mcmc/Inference.jl" + - name: "experimental/gibbs" + args: "experimental/gibbs.jl" + - name: "mcmc/ess" + args: "mcmc/ess.jl" + - name: "everything else" + args: "--skip essential/ad.jl mcmc/gibbs.jl mcmc/hmc.jl mcmc/abstractmcmc.jl mcmc/Inference.jl experimental/gibbs.jl mcmc/ess.jl" version: - - '1.7' + - '1.10' - '1' os: - ubuntu-latest @@ -39,7 +49,7 @@ jobs: - 1 - 2 exclude: - # With Windows and macOS, only run Julia 1.7, x64, 2 threads. We just want to see + # With Windows and macOS, only run x64, 2 threads. We just want to see # some combination work on OSes other than Ubuntu. - os: windows-latest version: '1' @@ -53,11 +63,11 @@ jobs: num_threads: 1 - os: macOS-latest num_threads: 1 - # It's sufficient to test x86 with one version of Julia and one thread. - - version: '1' - arch: x86 - - num_threads: 2 - arch: x86 + # It's sufficient to test x86 with only Julia 1.10 and 1 thread. + - arch: x86 + version: '1' + - arch: x86 + num_threads: 2 steps: - name: Print matrix variables @@ -66,7 +76,7 @@ jobs: echo "Architecture: ${{ matrix.arch }}" echo "Julia version: ${{ matrix.version }}" echo "Number of threads: ${{ matrix.num_threads }}" - echo "Test arguments: ${{ matrix.test-args }}" + echo "Test arguments: ${{ matrix.test.args }}" - name: (De)activate coverage analysis run: echo "COVERAGE=${{ matrix.version == '1' && matrix.os == 'ubuntu-latest' && matrix.num_threads == 2 }}" >> "$GITHUB_ENV" shell: bash @@ -81,7 +91,7 @@ jobs: # Custom calls of Pkg.test tend to miss features such as e.g. adjustments for CompatHelper PRs # Ref https://github.com/julia-actions/julia-runtest/pull/73 - name: Call Pkg.test - run: julia --color=yes --inline=yes --depwarn=yes --check-bounds=yes --threads=${{ matrix.num_threads }} --project=@. -e 'import Pkg; Pkg.test(; coverage=parse(Bool, ENV["COVERAGE"]), test_args=ARGS)' -- ${{ matrix.test-args }} + run: julia --color=yes --inline=yes --depwarn=yes --check-bounds=yes --threads=${{ matrix.num_threads }} --project=@. 
-e 'import Pkg; Pkg.test(; coverage=parse(Bool, ENV["COVERAGE"]), test_args=ARGS)' -- ${{ matrix.test.args }}
      - uses: julia-actions/julia-processcoverage@v1
        if: ${{ env.COVERAGE }}
      - uses: codecov/codecov-action@v4
diff --git a/HISTORY.md b/HISTORY.md
index 5b1cad0ed..3bc362d2b 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,3 +1,14 @@
+# Release 0.35.0
+
+## Breaking changes
+
+Julia 1.10 is now the minimum required version for Turing.
+
+Tapir.jl has been removed and replaced with its successor, Mooncake.jl.
+You can use Mooncake.jl by passing `adtype=AutoMooncake(; config=nothing)` to the relevant samplers.
+
+Support for Tracker.jl as an AD backend has been removed.
+
 # Release 0.33.0

 ## Breaking changes
diff --git a/Project.toml b/Project.toml
index 5f5c86b04..830c0f57a 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,6 +1,6 @@
 name = "Turing"
 uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
-version = "0.34.1"
+version = "0.35.0"

 [deps]
 ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -32,7 +32,6 @@ OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
 Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
-Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
 SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
 Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
@@ -49,45 +48,44 @@ TuringDynamicHMCExt = "DynamicHMC"
 TuringOptimExt = "Optim"

 [compat]
-ADTypes = "0.2, 1"
+ADTypes = "1.9"
 AbstractMCMC = "5.2"
 Accessors = "0.1"
 AdvancedHMC = "0.3.0, 0.4.0, 0.5.2, 0.6"
 AdvancedMH = "0.8"
 AdvancedPS = "0.6.0"
 AdvancedVI = "0.2"
-BangBang = "0.4"
+BangBang = "0.4.2"
 Bijectors = "0.13.6"
+Compat = "4.15.0"
 DataStructures = "0.18"
 Distributions = "0.23.3, 0.24, 0.25"
 DistributionsAD = "0.6"
 DocStringExtensions = "0.8, 0.9"
 DynamicHMC = "3.4"
-DynamicPPL = "0.28.2"
-Compat = "4.15.0"
+DynamicPPL = "0.29"
 EllipticalSliceSampling = "0.5, 1, 2"
 ForwardDiff = "0.10.3"
-Libtask = "0.7, 0.8"
+Libtask = "0.8.8"
 LinearAlgebra = "1"
 LogDensityProblems = "2"
 LogDensityProblemsAD = "1.7.0"
 MCMCChains = "5, 6"
 NamedArrays = "0.9, 0.10"
+Optim = "1"
 Optimization = "3"
 OptimizationOptimJL = "0.1, 0.2, 0.3"
 OrderedCollections = "1"
 Printf = "1"
 Random = "1"
-Optim = "1"
 Reexport = "0.2, 1"
-Requires = "0.5, 1.0"
-SciMLBase = "1.92.1, 2"
+SciMLBase = "2"
 SpecialFunctions = "0.7.2, 0.8, 0.9, 0.10, 1, 2"
 Statistics = "1.6"
 StatsAPI = "1.6"
 StatsBase = "0.32, 0.33, 0.34"
 StatsFuns = "0.8, 0.9, 1"
-julia = "1.7"
+julia = "1.10"

 [extras]
 DynamicHMC = "bbc10e6e-7c05-544b-b16e-64fede858acb"
diff --git a/docs/src/api.md b/docs/src/api.md
index e2b193c01..6acda4586 100644
--- a/docs/src/api.md
+++ b/docs/src/api.md
@@ -85,13 +85,12 @@ See the [variational inference tutorial](https://turinglang.org/docs/tutorials/0
 These are used to specify the automatic differentiation backend to use. See the [AD guide](https://turinglang.org/docs/tutorials/docs-10-using-turing-autodiff/) for more information.
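Before the updated table below, here is a minimal smoke test of the new backend selection. This is a sketch, not part of the diff: `gdemo` is a stand-in model, and the sampler's `adtype` keyword is the same one used throughout the test changes in this PR.

```julia
using Turing
import Mooncake  # the backend package must be loaded for AutoMooncake to work

# Stand-in model, used only to illustrate backend selection.
@model function gdemo(x)
    s ~ InverseGamma(2, 3)
    m ~ Normal(0, sqrt(s))
    x ~ Normal(m, sqrt(s))
end

# AutoMooncake replaces the removed AutoTapir/AutoTracker options;
# pass it to any gradient-based sampler via `adtype`.
chain = sample(gdemo(1.5), NUTS(; adtype=AutoMooncake(; config=nothing)), 500)
```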
-| Exported symbol | Documentation | Description | -|:----------------- |:------------------------------------ |:----------------------------------------- | -| `AutoForwardDiff` | [`ADTypes.AutoForwardDiff`](@extref) | ForwardDiff.jl backend | -| `AutoReverseDiff` | [`ADTypes.AutoReverseDiff`](@extref) | ReverseDiff.jl backend | -| `AutoZygote` | [`ADTypes.AutoZygote`](@extref) | Zygote.jl backend | -| `AutoTracker` | [`ADTypes.AutoTracker`](@extref) | Tracker.jl backend | -| `AutoTapir` | [`ADTypes.AutoTapir`](@extref) | Tapir.jl backend, only for ADTypes >= 1.0 | +| Exported symbol | Documentation | Description | +|:----------------- |:------------------------------------ |:---------------------- | +| `AutoForwardDiff` | [`ADTypes.AutoForwardDiff`](@extref) | ForwardDiff.jl backend | +| `AutoReverseDiff` | [`ADTypes.AutoReverseDiff`](@extref) | ReverseDiff.jl backend | +| `AutoZygote` | [`ADTypes.AutoZygote`](@extref) | Zygote.jl backend | +| `AutoMooncake` | [`ADTypes.AutoMooncake`](@extref) | Mooncake.jl backend | ### Debugging diff --git a/src/Turing.jl b/src/Turing.jl index 873c270fa..dbfd5c5cf 100644 --- a/src/Turing.jl +++ b/src/Turing.jl @@ -106,7 +106,7 @@ export @model, # modelling AutoForwardDiff, # ADTypes AutoReverseDiff, AutoZygote, - AutoTracker, + AutoMooncake, setprogress!, # debugging Flat, FlatPos, @@ -136,24 +136,4 @@ export @model, # modelling MAP, MLE -# AutoTapir is only supported by ADTypes v1.0 and above. -@static if VERSION >= v"1.10" && pkgversion(ADTypes) >= v"1" - export AutoTapir -end - -if !isdefined(Base, :get_extension) - using Requires -end - -function __init__() - @static if !isdefined(Base, :get_extension) - @require Optim = "429524aa-4258-5aef-a3af-852621145aeb" include( - "../ext/TuringOptimExt.jl" - ) - @require DynamicHMC = "bbc10e6e-7c05-544b-b16e-64fede858acb" include( - "../ext/TuringDynamicHMCExt.jl" - ) - end -end - end diff --git a/src/essential/Essential.jl b/src/essential/Essential.jl index 778e2a62f..c04c7e862 100644 --- a/src/essential/Essential.jl +++ b/src/essential/Essential.jl @@ -11,7 +11,7 @@ using Bijectors: PDMatDistribution using AdvancedVI using StatsFuns: logsumexp, softmax @reexport using DynamicPPL -using ADTypes: ADTypes, AutoForwardDiff, AutoTracker, AutoReverseDiff, AutoZygote +using ADTypes: ADTypes, AutoForwardDiff, AutoReverseDiff, AutoZygote, AutoMooncake using AdvancedPS: AdvancedPS @@ -20,16 +20,10 @@ include("container.jl") export @model, @varname, AutoForwardDiff, - AutoTracker, AutoZygote, AutoReverseDiff, + AutoMooncake, @logprob_str, @prob_str -# AutoTapir is only supported by ADTypes v1.0 and above. 
-@static if VERSION >= v"1.10" && pkgversion(ADTypes) >= v"1" - using ADTypes: AutoTapir - export AutoTapir -end - end # module diff --git a/src/mcmc/Inference.jl b/src/mcmc/Inference.jl index c564602eb..2309db005 100644 --- a/src/mcmc/Inference.jl +++ b/src/mcmc/Inference.jl @@ -14,7 +14,6 @@ using DynamicPPL: getlogp, VarName, getsym, - vectorize, _getvns, getdist, Model, diff --git a/src/mcmc/ess.jl b/src/mcmc/ess.jl index 2910a7efd..395456ee5 100644 --- a/src/mcmc/ess.jl +++ b/src/mcmc/ess.jl @@ -85,7 +85,7 @@ struct ESSPrior{M<:Model,S<:Sampler{<:ESS},V<:AbstractVarInfo,T} dist = getdist(varinfo, vn) EllipticalSliceSampling.isgaussian(typeof(dist)) || error("[ESS] only supports Gaussian prior distributions") - vectorize(dist, mean(dist)) + DynamicPPL.tovec(mean(dist)) end return new{M,S,V,typeof(μ)}(model, sampler, varinfo, μ) end diff --git a/src/mcmc/mh.jl b/src/mcmc/mh.jl index 945e7f41c..433add6b5 100644 --- a/src/mcmc/mh.jl +++ b/src/mcmc/mh.jl @@ -179,43 +179,20 @@ end Places the values of a `NamedTuple` into the relevant places of a `VarInfo`. """ function set_namedtuple!(vi::DynamicPPL.VarInfoOrThreadSafeVarInfo, nt::NamedTuple) - # TODO: Replace this with something like - # for vn in keys(vi) - # vi = DynamicPPL.setindex!!(vi, get(nt, vn)) - # end for (n, vals) in pairs(nt) vns = vi.metadata[n].vns - nvns = length(vns) - - # if there is a single variable only - if nvns == 1 - # assign the unpacked values - if length(vals) == 1 - vi[vns[1]] = [vals[1];] - # otherwise just assign the values - else - vi[vns[1]] = [vals;] - end - # if there are multiple variables - elseif vals isa AbstractArray - nvals = length(vals) - # if values are provided as an array with a single element - if nvals == 1 - # iterate over variables and unpacked values - for (vn, val) in zip(vns, vals[1]) - vi[vn] = [val;] - end - # otherwise number of variables and number of values have to be equal - elseif nvals == nvns - # iterate over variables and values - for (vn, val) in zip(vns, vals) - vi[vn] = [val;] - end - else - error("Cannot assign `NamedTuple` to `VarInfo`") - end + if vals isa AbstractVector + vals = unvectorize(vals) + end + if length(vns) == 1 + # Only one variable, assign the values to it + DynamicPPL.setindex!(vi, vals, vns[1]) else - error("Cannot assign `NamedTuple` to `VarInfo`") + # Spread the values across the variables + length(vns) == length(vals) || error("Unequal number of variables and values") + for (vn, val) in zip(vns, vals) + DynamicPPL.setindex!(vi, val, vn) + end end end end @@ -252,10 +229,10 @@ end unvectorize(dists::AbstractVector) = length(dists) == 1 ? first(dists) : dists # possibly unpack and reshape samples according to the prior distribution -reconstruct(dist::Distribution, val::AbstractVector) = DynamicPPL.reconstruct(dist, val) -function reconstruct(dist::AbstractVector{<:UnivariateDistribution}, val::AbstractVector) - return val +function reconstruct(dist::Distribution, val::AbstractVector) + return DynamicPPL.from_vec_transform(dist)(val) end +reconstruct(dist::AbstractVector{<:UnivariateDistribution}, val::AbstractVector) = val function reconstruct(dist::AbstractVector{<:MultivariateDistribution}, val::AbstractVector) offset = 0 return map(dist) do d @@ -289,7 +266,7 @@ end :( $name = reconstruct( unvectorize(DynamicPPL.getdist.(Ref(vi), vns.$name)), - DynamicPPL.getval(vi, vns.$name), + DynamicPPL.getindex_internal(vi, vns.$name), ) ) for name in names ] @@ -432,42 +409,45 @@ end #### #### Compiler interface, i.e. tilde operators. 
#### -function DynamicPPL.assume(rng, spl::Sampler{<:MH}, dist::Distribution, vn::VarName, vi) +function DynamicPPL.assume( + rng::Random.AbstractRNG, spl::Sampler{<:MH}, dist::Distribution, vn::VarName, vi +) + # Just defer to `SampleFromPrior`. + retval = DynamicPPL.assume(rng, SampleFromPrior(), dist, vn, vi) + # Update the Gibbs IDs because they might have been assigned in the `SampleFromPrior` call. DynamicPPL.updategid!(vi, vn, spl) - r = vi[vn] - return r, logpdf_with_trans(dist, r, istrans(vi, vn)), vi + # Return. + return retval end function DynamicPPL.dot_assume( rng, spl::Sampler{<:MH}, dist::MultivariateDistribution, - vn::VarName, + vns::AbstractVector{<:VarName}, var::AbstractMatrix, - vi, + vi::AbstractVarInfo, ) - @assert dim(dist) == size(var, 1) - getvn = i -> VarName(vn, vn.indexing * "[:,$i]") - vns = getvn.(1:size(var, 2)) - DynamicPPL.updategid!.(Ref(vi), vns, Ref(spl)) - r = vi[vns] - var .= r - return var, sum(logpdf_with_trans(dist, r, istrans(vi, vns[1]))), vi + # Just defer to `SampleFromPrior`. + retval = DynamicPPL.dot_assume(rng, SampleFromPrior(), dist, vns[1], var, vi) + # Update the Gibbs IDs because they might have been assigned in the `SampleFromPrior` call. + DynamicPPL.updategid!.((vi,), vns, (spl,)) + # Return. + return retval end function DynamicPPL.dot_assume( rng, spl::Sampler{<:MH}, dists::Union{Distribution,AbstractArray{<:Distribution}}, - vn::VarName, + vns::AbstractArray{<:VarName}, var::AbstractArray, - vi, + vi::AbstractVarInfo, ) - getvn = ind -> VarName(vn, vn.indexing * "[" * join(Tuple(ind), ",") * "]") - vns = getvn.(CartesianIndices(var)) - DynamicPPL.updategid!.(Ref(vi), vns, Ref(spl)) - r = reshape(vi[vec(vns)], size(var)) - var .= r - return var, sum(logpdf_with_trans.(dists, r, istrans(vi, vns[1]))), vi + # Just defer to `SampleFromPrior`. + retval = DynamicPPL.dot_assume(rng, SampleFromPrior(), dists, vns, var, vi) + # Update the Gibbs IDs because they might have been assigned in the `SampleFromPrior` call. 
+ DynamicPPL.updategid!.((vi,), vns, (spl,)) + return retval end function DynamicPPL.observe(spl::Sampler{<:MH}, d::Distribution, value, vi) diff --git a/src/mcmc/particle_mcmc.jl b/src/mcmc/particle_mcmc.jl index 579ebfae2..a2b675720 100644 --- a/src/mcmc/particle_mcmc.jl +++ b/src/mcmc/particle_mcmc.jl @@ -385,7 +385,7 @@ function DynamicPPL.assume( elseif is_flagged(vi, vn, "del") unset_flag!(vi, vn, "del") # Reference particle parent r = rand(trng, dist) - vi[vn] = vectorize(dist, r) + vi[vn] = DynamicPPL.tovec(r) DynamicPPL.setgid!(vi, spl.selector, vn) setorder!(vi, vn, get_num_produce(vi)) else diff --git a/test/Project.toml b/test/Project.toml index 67292d2af..d5d83726f 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -16,6 +16,7 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c" LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1" MCMCChains = "c7f686f2-ff18-58e9-bc7b-31028e88f75d" +Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6" NamedArrays = "86f7a689-2022-50b4-a561-43c23ac3c673" Optim = "429524aa-4258-5aef-a3af-852621145aeb" Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba" @@ -32,7 +33,6 @@ StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [compat] @@ -45,7 +45,7 @@ Clustering = "0.14, 0.15" Distributions = "0.25" DistributionsAD = "0.6.3" DynamicHMC = "2.1.6, 3.0" -DynamicPPL = "0.28" +DynamicPPL = "0.29" FiniteDifferences = "0.10.8, 0.11, 0.12" ForwardDiff = "0.10.12 - 0.10.32, 0.10" HypothesisTests = "0.11" @@ -53,6 +53,7 @@ LinearAlgebra = "1" LogDensityProblems = "2" LogDensityProblemsAD = "1.4" MCMCChains = "5, 6" +Mooncake = "0.4.19" NamedArrays = "0.9.4, 0.10" Optim = "1" Optimization = "3" @@ -68,6 +69,5 @@ StableRNGs = "1" StatsBase = "0.33, 0.34" StatsFuns = "0.9.5, 1" TimerOutputs = "0.5" -Tracker = "0.2.11" Zygote = "0.5.4, 0.6" julia = "1.3" diff --git a/test/essential/ad.jl b/test/essential/ad.jl index 943b4eadc..0c497a2cb 100644 --- a/test/essential/ad.jl +++ b/test/essential/ad.jl @@ -2,7 +2,7 @@ module AdTests using ..Models: gdemo_default using Distributions: logpdf -using DynamicPPL: getlogp, getval +using DynamicPPL: getlogp, getindex_internal using ForwardDiff using LinearAlgebra using LogDensityProblems: LogDensityProblems @@ -24,7 +24,7 @@ function test_model_ad(model, f, syms::Vector{Symbol}) s = syms[i] vnms[i] = getfield(vi.metadata, s).vns[1] - vals = getval(vi, vnms[i]) + vals = getindex_internal(vi, vnms[i]) for i in eachindex(vals) push!(vnvals, vals[i]) end @@ -61,8 +61,8 @@ end ad_test_f(vi, SampleFromPrior()) svn = vi.metadata.s.vns[1] mvn = vi.metadata.m.vns[1] - _s = getval(vi, svn)[1] - _m = getval(vi, mvn)[1] + _s = getindex_internal(vi, svn)[1] + _m = getindex_internal(vi, mvn)[1] dist_s = InverseGamma(2, 3) @@ -88,20 +88,6 @@ end ) x = map(x -> Float64(x), vi[SampleFromPrior()]) - trackerℓ = LogDensityProblemsAD.ADgradient(Turing.AutoTracker(), ℓ) - if isdefined(Base, :get_extension) - @test trackerℓ isa - Base.get_extension( - LogDensityProblemsAD, :LogDensityProblemsADTrackerExt - ).TrackerGradientLogDensity - else - @test trackerℓ isa - LogDensityProblemsAD.LogDensityProblemsADTrackerExt.TrackerGradientLogDensity - end - @test trackerℓ.ℓ === ℓ - ∇E1 = 
LogDensityProblems.logdensity_and_gradient(trackerℓ, x)[2] - @test sort(∇E1) ≈ grad_FWAD atol = 1e-9 - zygoteℓ = LogDensityProblemsAD.ADgradient(Turing.AutoZygote(), ℓ) if isdefined(Base, :get_extension) @test zygoteℓ isa @@ -149,7 +135,7 @@ end test_model_ad(wishart_ad(), logp3, [:v]) end - @testset "Simplex Tracker, Zygote and ReverseDiff (with and without caching) AD" begin + @testset "Simplex Zygote and ReverseDiff (with and without caching) AD" begin @model function dir() return theta ~ Dirichlet(1 ./ fill(4, 4)) end diff --git a/test/ext/OptimInterface.jl b/test/ext/OptimInterface.jl index 817d7a520..e9d16535a 100644 --- a/test/ext/OptimInterface.jl +++ b/test/ext/OptimInterface.jl @@ -112,7 +112,8 @@ using Turing @test isapprox(map1.values.array, map2.values.array) end - # FIXME: Some models doesn't work for Tracker and ReverseDiff. + # FIXME: Some models don't work for ReverseDiff. + # TODO: Check if above statement is still correct @testset "MAP for $(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS result_true = DynamicPPL.TestUtils.posterior_optima(model) diff --git a/test/mcmc/Inference.jl b/test/mcmc/Inference.jl index 7ee23fc7a..a92f9c763 100644 --- a/test/mcmc/Inference.jl +++ b/test/mcmc/Inference.jl @@ -12,11 +12,10 @@ using LinearAlgebra: I import MCMCChains import Random import ReverseDiff +import Mooncake using Test: @test, @test_throws, @testset using Turing -ADUtils.install_tapir && import Tapir - @testset "Testing inference.jl with $adbackend" for adbackend in ADUtils.adbackends # Only test threading if 1.3+. if VERSION > v"1.2" @@ -383,7 +382,6 @@ ADUtils.install_tapir && import Tapir chn = sample(gdemo_default, alg, 1000) end @testset "vectorization @." begin - # https://github.com/FluxML/Tracker.jl/issues/119 @model function vdemo1(x) s ~ InverseGamma(2, 3) m ~ Normal(0, sqrt(s)) @@ -589,7 +587,7 @@ ADUtils.install_tapir && import Tapir true) @model function demo_incorrect_missing(y) - return y[1:1] ~ MvNormal(zeros(1), 1) + return y[1:1] ~ MvNormal(zeros(1), I) end @test_throws ErrorException sample( demo_incorrect_missing([missing]), NUTS(), 1000; check_model=true diff --git a/test/mcmc/abstractmcmc.jl b/test/mcmc/abstractmcmc.jl index 43e3966a9..449b43b71 100644 --- a/test/mcmc/abstractmcmc.jl +++ b/test/mcmc/abstractmcmc.jl @@ -12,12 +12,11 @@ using LogDensityProblemsAD: LogDensityProblemsAD using Random: Random using ReverseDiff: ReverseDiff using StableRNGs: StableRNG +import Mooncake using Test: @test, @test_throws, @testset using Turing using Turing.Inference: AdvancedHMC -ADUtils.install_tapir && import Tapir - function initialize_nuts(model::Turing.Model) # Create a log-density function with an implementation of the # gradient so we ensure that we're using the same AD backend as in Turing. @@ -117,10 +116,7 @@ end @testset "External samplers" begin @testset "AdvancedHMC.jl" begin - # TODO(mhauru) The below tests fail with Tapir, see - # https://github.com/TuringLang/Turing.jl/pull/2289. - # Once that is fixed, this should say `for adtype in ADUtils.adbackends`. - @testset "adtype=$adtype" for adtype in [AutoForwardDiff(), AutoReverseDiff()] + @testset "adtype=$adtype" for adtype in ADUtils.adbackends @testset "$(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS # Need some functionality to initialize the sampler. # TODO: Remove this once the constructors in the respective packages become "lazy". 
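The deleted Tracker block followed the same pattern as the Zygote check that remains: wrap the model's log density, attach an AD backend, and pull a gradient through it. A sketch of that pattern, assuming a `model` and parameter vector `x` standing in for the test fixtures:

```julia
using DynamicPPL: DynamicPPL
using LogDensityProblems: LogDensityProblems
using LogDensityProblemsAD: LogDensityProblemsAD
import Zygote  # backend must be loaded for the ADgradient extension
using Turing

# Wrap the model's log density ...
ℓ = DynamicPPL.LogDensityFunction(model)
# ... attach an AD backend to it ...
zygoteℓ = LogDensityProblemsAD.ADgradient(Turing.AutoZygote(), ℓ)
# ... and evaluate the value and gradient at x.
logp, grad = LogDensityProblems.logdensity_and_gradient(zygoteℓ, x)
```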
diff --git a/test/mcmc/gibbs.jl b/test/mcmc/gibbs.jl index 6868cb5e8..cd044910b 100644 --- a/test/mcmc/gibbs.jl +++ b/test/mcmc/gibbs.jl @@ -8,13 +8,12 @@ using Distributions: sample using ForwardDiff: ForwardDiff using Random: Random using ReverseDiff: ReverseDiff +import Mooncake using Test: @test, @testset using Turing using Turing: Inference using Turing.RandomMeasures: ChineseRestaurantProcess, DirichletProcess -ADUtils.install_tapir && import Tapir - @testset "Testing gibbs.jl with $adbackend" for adbackend in ADUtils.adbackends @testset "gibbs constructor" begin N = 500 diff --git a/test/mcmc/gibbs_conditional.jl b/test/mcmc/gibbs_conditional.jl index 3f02c7594..d6d81cbe0 100644 --- a/test/mcmc/gibbs_conditional.jl +++ b/test/mcmc/gibbs_conditional.jl @@ -12,11 +12,10 @@ using ReverseDiff: ReverseDiff using StableRNGs: StableRNG using StatsBase: counts using StatsFuns: StatsFuns +import Mooncake using Test: @test, @testset using Turing -ADUtils.install_tapir && import Tapir - @testset "Testing gibbs conditionals.jl with $adbackend" for adbackend in ADUtils.adbackends Random.seed!(1000) rng = StableRNG(123) diff --git a/test/mcmc/hmc.jl b/test/mcmc/hmc.jl index dde977a6f..7404dbf43 100644 --- a/test/mcmc/hmc.jl +++ b/test/mcmc/hmc.jl @@ -2,7 +2,6 @@ module HMCTests using ..Models: gdemo_default using ..ADUtils: ADTypeCheckContext -#using ..Models: gdemo using ..NumericalTests: check_gdemo, check_numerical import ..ADUtils using Distributions: Bernoulli, Beta, Categorical, Dirichlet, Normal, Wishart, sample @@ -15,11 +14,10 @@ using LinearAlgebra: I, dot, vec import Random using StableRNGs: StableRNG using StatsFuns: logistic +import Mooncake using Test: @test, @test_logs, @testset using Turing -ADUtils.install_tapir && import Tapir - @testset "Testing hmc.jl with $adbackend" for adbackend in ADUtils.adbackends # Set a seed rng = StableRNG(123) @@ -269,19 +267,15 @@ ADUtils.install_tapir && import Tapir end end - # Disable on Julia <1.8 due to https://github.com/TuringLang/Turing.jl/pull/2197. - # TODO: Remove this block once https://github.com/JuliaFolds2/BangBang.jl/pull/22 has been released. - if VERSION ≥ v"1.8" - @testset "(partially) issue: #2095" begin - @model function vector_of_dirichlet(::Type{TV}=Vector{Float64}) where {TV} - xs = Vector{TV}(undef, 2) - xs[1] ~ Dirichlet(ones(5)) - xs[2] ~ Dirichlet(ones(5)) - end - model = vector_of_dirichlet() - chain = sample(model, NUTS(), 1000) - @test mean(Array(chain)) ≈ 0.2 + @testset "(partially) issue: #2095" begin + @model function vector_of_dirichlet(::Type{TV}=Vector{Float64}) where {TV} + xs = Vector{TV}(undef, 2) + xs[1] ~ Dirichlet(ones(5)) + xs[2] ~ Dirichlet(ones(5)) end + model = vector_of_dirichlet() + chain = sample(model, NUTS(), 1000) + @test mean(Array(chain)) ≈ 0.2 end @testset "issue: #2195" begin diff --git a/test/mcmc/mh.jl b/test/mcmc/mh.jl index a01d3dc25..8813834ed 100644 --- a/test/mcmc/mh.jl +++ b/test/mcmc/mh.jl @@ -185,27 +185,40 @@ GKernel(var) = (x) -> Normal(x, sqrt.(var)) # @test v1 < v2 end - # Disable on Julia <1.8 due to https://github.com/TuringLang/Turing.jl/pull/2197. - # TODO: Remove this block once https://github.com/JuliaFolds2/BangBang.jl/pull/22 has been released. 
- if VERSION ≥ v"1.8" - @testset "vector of multivariate distributions" begin - @model function test(k) - T = Vector{Vector{Float64}}(undef, k) - for i in 1:k - T[i] ~ Dirichlet(5, 1.0) - end + @testset "vector of multivariate distributions" begin + @model function test(k) + T = Vector{Vector{Float64}}(undef, k) + for i in 1:k + T[i] ~ Dirichlet(5, 1.0) end + end - Random.seed!(100) - chain = sample(test(1), MH(), 5_000) - for i in 1:5 - @test mean(chain, "T[1][$i]") ≈ 0.2 atol = 0.01 - end + Random.seed!(100) + chain = sample(test(1), MH(), 5_000) + for i in 1:5 + @test mean(chain, "T[1][$i]") ≈ 0.2 atol = 0.01 + end + Random.seed!(100) + chain = sample(test(10), MH(), 5_000) + for j in 1:10, i in 1:5 + @test mean(chain, "T[$j][$i]") ≈ 0.2 atol = 0.01 + end + end + + @testset "LKJCholesky" begin + for uplo in ['L', 'U'] + @model f() = x ~ LKJCholesky(2, 1, uplo) Random.seed!(100) - chain = sample(test(10), MH(), 5_000) - for j in 1:10, i in 1:5 - @test mean(chain, "T[$j][$i]") ≈ 0.2 atol = 0.01 + chain = sample(f(), MH(), 5_000) + indices = [(1, 1), (2, 1), (2, 2)] + values = [1, 0, 0.785] + for ((i, j), v) in zip(indices, values) + if uplo == 'U' # Transpose + @test mean(chain, "x.$uplo[$j, $i]") ≈ v atol = 0.01 + else + @test mean(chain, "x.$uplo[$i, $j]") ≈ v atol = 0.01 + end end end end diff --git a/test/mcmc/sghmc.jl b/test/mcmc/sghmc.jl index 95b3bc543..1f8179503 100644 --- a/test/mcmc/sghmc.jl +++ b/test/mcmc/sghmc.jl @@ -8,11 +8,10 @@ import ForwardDiff using LinearAlgebra: dot import ReverseDiff using StableRNGs: StableRNG +import Mooncake using Test: @test, @testset using Turing -ADUtils.install_tapir && import Tapir - @testset "Testing sghmc.jl with $adbackend" for adbackend in ADUtils.adbackends @testset "sghmc constructor" begin alg = SGHMC(; learning_rate=0.01, momentum_decay=0.1, adtype=adbackend) diff --git a/test/optimisation/Optimisation.jl b/test/optimisation/Optimisation.jl index 1ba073864..d8afd83db 100644 --- a/test/optimisation/Optimisation.jl +++ b/test/optimisation/Optimisation.jl @@ -1,11 +1,13 @@ module OptimisationTests using ..Models: gdemo, gdemo_default -using ..ADUtils: ADTypeCheckContext +using ..ADUtils: ADUtils using Distributions using Distributions.FillArrays: Zeros using DynamicPPL: DynamicPPL +using ForwardDiff: ForwardDiff using LinearAlgebra: Diagonal, I +using Mooncake: Mooncake using Random: Random using Optimization using Optimization: Optimization @@ -617,12 +619,24 @@ using Turing @assert get(result, :c) == (; :c => Array{Float64}[]) end - @testset "ADType" begin + @testset "ADType test with $adbackend" for adbackend in ADUtils.adbackends Random.seed!(222) - for adbackend in (AutoReverseDiff(), AutoForwardDiff(), AutoTracker()) - m = DynamicPPL.contextualize( - gdemo_default, ADTypeCheckContext(adbackend, gdemo_default.context) - ) + m = DynamicPPL.contextualize( + gdemo_default, ADUtils.ADTypeCheckContext(adbackend, gdemo_default.context) + ) + if adbackend isa AutoMooncake + # Optimization.jl does not support Mooncake as an AD backend, see + # https://docs.sciml.ai/Optimization/stable/API/ad/#ad + # If it ever does, then we should just run them to make sure they don't error + err_msg = "The passed automatic differentiation backend choice is not available" + @test_throws err_msg maximum_likelihood(m; adtype=adbackend) + @test_throws err_msg maximum_a_posteriori(m; adtype=adbackend) + elseif adbackend isa AutoForwardDiff + # TODO: Figure out why this is happening. 
+ # https://github.com/TuringLang/Turing.jl/issues/2369 + @test_throws DivideError maximum_likelihood(m; adtype=adbackend) + @test_throws DivideError maximum_a_posteriori(m; adtype=adbackend) + else # These will error if the adbackend being used is not the one set. maximum_likelihood(m; adtype=adbackend) maximum_a_posteriori(m; adtype=adbackend) diff --git a/test/test_utils/ad_utils.jl b/test/test_utils/ad_utils.jl index e900a8f69..f7358de75 100644 --- a/test/test_utils/ad_utils.jl +++ b/test/test_utils/ad_utils.jl @@ -4,8 +4,8 @@ using ForwardDiff: ForwardDiff using Pkg: Pkg using Random: Random using ReverseDiff: ReverseDiff +using Mooncake: Mooncake using Test: Test -using Tracker: Tracker using Turing: Turing using Turing: DynamicPPL using Zygote: Zygote @@ -30,18 +30,10 @@ const eltypes_by_adtype = Dict( ReverseDiff.TrackedVecOrMat, ReverseDiff.TrackedVector, ), + Turing.AutoMooncake => (Mooncake.CoDual,), # Zygote.Dual is actually the same as ForwardDiff.Dual, so can't distinguish between the # two by element type. However, we have other checks for Zygote, see check_adtype. Turing.AutoZygote => (Zygote.Dual,), - Turing.AutoTracker => ( - Tracker.Tracked, - Tracker.TrackedArray, - Tracker.TrackedMatrix, - Tracker.TrackedReal, - Tracker.TrackedStyle, - Tracker.TrackedVecOrMat, - Tracker.TrackedVector, - ), ) """ @@ -245,7 +237,8 @@ Test.@testset "ADTypeCheckContext" begin Turing.AutoForwardDiff(), Turing.AutoReverseDiff(), Turing.AutoZygote(), - Turing.AutoTracker(), + # TODO: Mooncake + # Turing.AutoMooncake(config=nothing), ) for actual_adtype in adtypes sampler = Turing.HMC(0.1, 5; adtype=actual_adtype) @@ -281,17 +274,9 @@ end All the ADTypes on which we want to run the tests. """ adbackends = [ - Turing.AutoForwardDiff(; chunksize=0), Turing.AutoReverseDiff(; compile=false) + Turing.AutoForwardDiff(; chunksize=0), + Turing.AutoReverseDiff(; compile=false), + Turing.AutoMooncake(; config=nothing), ] -# Tapir isn't supported for older Julia versions, hence the check. -install_tapir = isdefined(Turing, :AutoTapir) -if install_tapir - # TODO(mhauru) Is there a better way to install optional dependencies like this? - Pkg.add("Tapir") - using Tapir - push!(adbackends, Turing.AutoTapir(false)) - push!(eltypes_by_adtype, Turing.AutoTapir => (Tapir.CoDual,)) -end - end
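With the conditional Tapir `Pkg.add` gone, the backend list in `test/test_utils/ad_utils.jl` is now static, and every `@testset ... for adbackend in ADUtils.adbackends` loop in this diff iterates over it directly. A sketch of that consumption pattern — `demo` is a throwaway illustration model, not part of the test suite:

```julia
using Turing
import ForwardDiff, ReverseDiff, Mooncake

# Mirrors the static list in test/test_utils/ad_utils.jl after this change.
adbackends = [
    Turing.AutoForwardDiff(; chunksize=0),
    Turing.AutoReverseDiff(; compile=false),
    Turing.AutoMooncake(; config=nothing),
]

@model demo() = x ~ Normal()  # throwaway model for the smoke test

for adtype in adbackends
    # Every gradient-based sampler accepts the backend via `adtype`,
    # as in the HMC/SGHMC testsets above.
    sample(demo(), HMC(0.1, 5; adtype=adtype), 100)
end
```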