From 03d0e78b0111cfc60652d1c7306515bd65a813be Mon Sep 17 00:00:00 2001 From: Markus Hauru Date: Wed, 5 Jun 2024 17:29:56 +0200 Subject: [PATCH] Test restructuring (#2237) * Remove unused test util function * Refactoring ad and optiminterface tests to modules * Add SelectiveTests.jl * Rework tests CI GA * Switch test CI on Mac back to x64 * Remove coverage from CI. Improve matrix. * Wrap all tests in modules. * Remove unused test utils * Remove GA workflows for DynamicHMC and Numerical * Rename TuringCI GA to Tests * Fix test_args passing in CI * Fix for CI test matrix * Fixes to various test files * Add container.jl to test suite * Fix spacing around * in test includes * Split ad.jl and abstractmcmc.jl tests to separate CI jobs * Alphabetise imports in tests * In tests, use import X over using X: X * Add missing imports to abstractmcmc.jl tests * Add some missing imports to tests * Merge ad_utils.jl to ad.jl in tests * Merge testing_functions.jl into mh.jl in tests * Simplify test_utils Turn all of them into modules or merge them into other files that used to `include` them. * Add missing import to numerical_tests.jl * Update Project.toml (#2244) * Update README.md * Export adtype `AutoTapir` (#2236) * Export adtype `AutoTapir` * Update Project.toml * Fix missing AutoTapir (#2242) * Update Essential.jl * Update Project.toml * Drop support for ADTypes 0.2 (#2243) ADTypes 0.2 doesn't support AutoTapir yet. 
* Optimization improvements (#2221) * initial work on interface * Improving the Optimization.jl interface, work in progress * More work on Optimization.jl, still in progress * Add docstrings to Optimisation.jl * Fix OptimizationOptimJL version constraint * Clean up optimisation TODO notes * Relax OptimizationOptimJL version constraints * Simplify optimization imports * Remove commented out code * Small improvements all over in optimisation * Clean up of Optimisation tests * Add a test for OptimizationBBO * Add tests using OptimizationNLopt * Rename/move the optimisation test files The files for Optimisation.jl and OptimInterface.jl were in the wrong folders: One in `test/optimisation` the other in `test/ext`, but the wrong way around. * Relax compat bounds on OptimizationBBO and OptimizationNLopt * Split a testset to test/optimisation/OptimisationCore.jl * Import AbstractADType from ADTypes, not SciMLBase * Fix Optimization.jl depwarning * Fix seeds in more tests * Merge OptimizationCore into Optimization * In optimisation, rename init_value to initial_params * Optimisation docstring improvements * Code style adjustments in optimisation * Qualify references in optimisation * Simplify creation of ModeResults * Qualified references in optimization tests * Enforce line length in optimization * Simplify optimisation exports * Enforce line length in Optim.jl interface * Refactor away ModeEstimationProblem * Style and docstring improvements for optimisation * Add := test to optimisation tests. 
* Clarify comment * Simplify generate_initial_params * Fix doc references * Rename testsets * Refactor check_success * Make initial_params a kwarg * Remove unnecessary type constraint on kwarg * Fix broken reference in tests * Fix bug in generate_initial_params * Fix qualified references in optimisation tests * Add hasstats checks to optimisation tests * Extend OptimizationOptimJL compat to 0.3 Co-authored-by: Hong Ge <3279477+yebai@users.noreply.github.com> * Change some `import`s to `using` Co-authored-by: Tor Erlend Fjelde * Change to kwargs... in docstrings * Add a two-argument method to OptimLogDensity as callable --------- Co-authored-by: Tor Erlend Fjelde Co-authored-by: Hong Ge <3279477+yebai@users.noreply.github.com> * Update Project.toml * CompatHelper: bump compat for OptimizationOptimJL to 0.3 for package test, (keep existing compat) (#2246) Co-authored-by: CompatHelper Julia --------- Co-authored-by: Markus Hauru Co-authored-by: Tor Erlend Fjelde Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: CompatHelper Julia * Set fail-fast: false for CI test matrix * Add a step to print matrix variables to tests Action * Fix typo in tests Action * ADTypes v0.2 compatibility for test restructuring (#2253) * Restore compat with ADTypes v0.2. Make AutoTapir export conditional. 
* Fix AutoTapir export in Essential.jl --------- Co-authored-by: Hong Ge <3279477+yebai@users.noreply.github.com> Co-authored-by: Tor Erlend Fjelde Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: CompatHelper Julia --- .github/workflows/DynamicHMC.yml | 42 ---------- .github/workflows/Numerical.yml | 41 --------- .github/workflows/Tests.yml | 87 +++++++++++++++++++ .github/workflows/TuringCI.yml | 70 ---------------- Project.toml | 2 +- test/essential/ad.jl | 61 +++++++++++++- test/essential/container.jl | 14 +++- test/experimental/gibbs.jl | 12 ++- test/ext/OptimInterface.jl | 16 +++- test/ext/dynamichmc.jl | 15 +++- test/mcmc/Inference.jl | 18 ++++ test/mcmc/abstractmcmc.jl | 21 ++++- test/mcmc/emcee.jl | 13 +++ test/mcmc/ess.jl | 17 +++- test/mcmc/gibbs.jl | 24 +++++- test/mcmc/gibbs_conditional.jl | 24 +++++- test/mcmc/hmc.jl | 56 +++++++++---- test/mcmc/is.jl | 15 +++- test/mcmc/mh.jl | 39 +++++++-- test/mcmc/particle_mcmc.jl | 30 +++++-- test/mcmc/sghmc.jl | 22 ++++- test/mcmc/utilities.jl | 12 +++ test/optimisation/Optimisation.jl | 22 ++++- test/runtests.jl | 68 +++++---------- test/stdlib/RandomMeasures.jl | 22 +++-- test/stdlib/distributions.jl | 23 +++-- test/test_utils/AllUtils.jl | 7 -- test/test_utils/SelectiveTests.jl | 59 +++++++++++++ test/test_utils/ad_utils.jl | 106 ------------------------ test/test_utils/models.jl | 11 ++- test/test_utils/numerical_tests.jl | 11 +++ test/test_utils/random_measure_utils.jl | 34 -------- test/test_utils/staging.jl | 52 ------------ test/test_utils/testing_functions.jl | 26 ------ test/variational/advi.jl | 26 ++++-- test/variational/optimisers.jl | 10 +++ 36 files changed, 621 insertions(+), 507 deletions(-) delete mode 100644 .github/workflows/DynamicHMC.yml delete mode 100644 .github/workflows/Numerical.yml create mode 100644 .github/workflows/Tests.yml delete mode 100644 .github/workflows/TuringCI.yml delete mode 100644 test/test_utils/AllUtils.jl 
create mode 100644 test/test_utils/SelectiveTests.jl delete mode 100644 test/test_utils/ad_utils.jl delete mode 100644 test/test_utils/random_measure_utils.jl delete mode 100644 test/test_utils/staging.jl delete mode 100644 test/test_utils/testing_functions.jl diff --git a/.github/workflows/DynamicHMC.yml b/.github/workflows/DynamicHMC.yml deleted file mode 100644 index 099f70fcf..000000000 --- a/.github/workflows/DynamicHMC.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: DynamicHMC-CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - runs-on: ${{ matrix.os }} - strategy: - matrix: - version: - - '1.7' - - '1' - os: - - ubuntu-latest - arch: - - x64 - steps: - - uses: actions/checkout@v2 - - uses: julia-actions/setup-julia@v1 - with: - version: ${{ matrix.version }} - arch: ${{ matrix.arch }} - - uses: actions/cache@v1 - env: - cache-name: cache-artifacts - with: - path: ~/.julia/artifacts - key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} - restore-keys: | - ${{ runner.os }}-test-${{ env.cache-name }}- - ${{ runner.os }}-test- - ${{ runner.os }}- - - uses: julia-actions/julia-buildpkg@latest - - uses: julia-actions/julia-runtest@latest - with: - coverage: false - env: - STAGE: dynamichmc diff --git a/.github/workflows/Numerical.yml b/.github/workflows/Numerical.yml deleted file mode 100644 index 314241fbe..000000000 --- a/.github/workflows/Numerical.yml +++ /dev/null @@ -1,41 +0,0 @@ -name: Numerical - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - runs-on: ${{ matrix.os }} - strategy: - matrix: - version: - - '1.7' - - '1' - os: - - ubuntu-latest - arch: - - x64 - steps: - - uses: actions/checkout@v2 - - uses: julia-actions/setup-julia@v1 - with: - version: ${{ matrix.version }} - arch: ${{ matrix.arch }} - - uses: actions/cache@v1 - env: - cache-name: cache-artifacts - with: - path: ~/.julia/artifacts - key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} 
- restore-keys: | - ${{ runner.os }}-test-${{ env.cache-name }}- - ${{ runner.os }}-test- - - uses: julia-actions/julia-buildpkg@latest - - uses: julia-actions/julia-runtest@latest - with: - coverage: false - env: - STAGE: numerical diff --git a/.github/workflows/Tests.yml b/.github/workflows/Tests.yml new file mode 100644 index 000000000..4f8ac4dcc --- /dev/null +++ b/.github/workflows/Tests.yml @@ -0,0 +1,87 @@ +name: Tests + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + runs-on: ${{ matrix.os }} + continue-on-error: ${{ matrix.version == 'nightly' }} + + strategy: + fail-fast: false + matrix: + test-args: + # Run some of the slower test files individually. The last one catches everything + # not included in the others. + - "essential/ad.jl" + - "mcmc/gibbs.jl" + - "mcmc/hmc.jl" + - "mcmc/abstractmcmc.jl" + - "mcmc/Inference.jl" + - "experimental/gibbs.jl" + - "mcmc/ess.jl" + - "--skip essential/ad.jl mcmc/gibbs.jl mcmc/hmc.jl mcmc/abstractmcmc.jl mcmc/Inference.jl experimental/gibbs.jl mcmc/ess.jl" + version: + - '1.7' + - '1' + os: + - ubuntu-latest + - windows-latest + - macOS-latest + arch: + - x64 + - x86 + num_threads: + - 1 + - 2 + exclude: + # With Windows and macOS, only run Julia 1.7, x64, 2 threads. We just want to see + # some combination work on OSes other than Ubuntu. + - os: windows-latest + version: '1' + - os: macOS-latest + version: '1' + - os: windows-latest + arch: x86 + - os: macOS-latest + arch: x86 + - os: windows-latest + num_threads: 1 + - os: macOS-latest + num_threads: 1 + # It's sufficient to test x86 with one version of Julia and one thread. 
+ - version: '1' + arch: x86 + - num_threads: 2 + arch: x86 + + steps: + - name: Print matrix variables + run: | + echo "OS: ${{ matrix.os }}" + echo "Architecture: ${{ matrix.arch }}" + echo "Julia version: ${{ matrix.version }}" + echo "Number of threads: ${{ matrix.num_threads }}" + echo "Test arguments: ${{ matrix.test-args }}" + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 + with: + version: '${{ matrix.version }}' + arch: ${{ matrix.arch }} + - uses: actions/cache@v4 + env: + cache-name: cache-artifacts + with: + path: ~/.julia/artifacts + key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} + restore-keys: | + ${{ runner.os }}-test-${{ env.cache-name }}- + ${{ runner.os }}-test- + ${{ runner.os }}- + - uses: julia-actions/julia-buildpkg@latest + - name: Call Pkg.test + run: julia --color=yes --depwarn=yes --check-bounds=yes --threads=${{ matrix.num_threads }} --project=@. -e 'import Pkg; Pkg.test(; test_args=ARGS)' -- ${{ matrix.test-args }} diff --git a/.github/workflows/TuringCI.yml b/.github/workflows/TuringCI.yml deleted file mode 100644 index 88cc27bcb..000000000 --- a/.github/workflows/TuringCI.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: Turing-CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - runs-on: ${{ matrix.os }} - continue-on-error: ${{ matrix.version == 'nightly' }} - strategy: - matrix: - version: - - '1.7' - - '1' - os: - - ubuntu-latest - arch: - - x64 - num_threads: - - 1 - - 2 - include: - - version: '1.7' - os: ubuntu-latest - arch: x86 - num_threads: 2 - - version: '1.7' - os: windows-latest - arch: x64 - num_threads: 2 - - version: '1.7' - os: macOS-latest - arch: x64 - num_threads: 2 - steps: - - uses: actions/checkout@v2 - - uses: julia-actions/setup-julia@v1 - with: - version: ${{ matrix.version }} - arch: ${{ matrix.arch }} - - uses: actions/cache@v1 - env: - cache-name: cache-artifacts - with: - path: ~/.julia/artifacts - key: ${{ runner.os }}-test-${{ 
env.cache-name }}-${{ hashFiles('**/Project.toml') }} - restore-keys: | - ${{ runner.os }}-test-${{ env.cache-name }}- - ${{ runner.os }}-test- - ${{ runner.os }}- - - uses: julia-actions/julia-buildpkg@latest - - uses: julia-actions/julia-runtest@latest - with: - coverage: ${{ matrix.version == '1.6' && matrix.os == 'ubuntu-latest' && matrix.num_threads == 1 }} - env: - JULIA_NUM_THREADS: ${{ matrix.num_threads }} - - uses: julia-actions/julia-processcoverage@v1 - if: matrix.version == '1.7' && matrix.os == 'ubuntu-latest' && matrix.num_threads == 1 - - uses: codecov/codecov-action@v1 - if: matrix.version == '1.7' && matrix.os == 'ubuntu-latest' && matrix.num_threads == 1 - with: - file: lcov.info - - uses: coverallsapp/github-action@master - if: matrix.version == '1.7' && matrix.os == 'ubuntu-latest' && matrix.num_threads == 1 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - path-to-lcov: lcov.info diff --git a/Project.toml b/Project.toml index f8c04d7c7..99e7b316b 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Turing" uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0" -version = "0.32.3" +version = "0.33" [deps] ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b" diff --git a/test/essential/ad.jl b/test/essential/ad.jl index 09655ad54..b67c8bd56 100644 --- a/test/essential/ad.jl +++ b/test/essential/ad.jl @@ -1,5 +1,59 @@ +module AdTests + +using ..Models: gdemo_default +using Distributions: logpdf +using DynamicPPL: getlogp, getval +using ForwardDiff +using LinearAlgebra +import LogDensityProblems +import LogDensityProblemsAD +using ReverseDiff +using Test: @test, @testset +using Turing +using Turing: SampleFromPrior +using Zygote + +function test_model_ad(model, f, syms::Vector{Symbol}) + # Set up VI. + vi = Turing.VarInfo(model) + + # Collect symbols. 
+ vnms = Vector(undef, length(syms)) + vnvals = Vector{Float64}() + for i in 1:length(syms) + s = syms[i] + vnms[i] = getfield(vi.metadata, s).vns[1] + + vals = getval(vi, vnms[i]) + for i in eachindex(vals) + push!(vnvals, vals[i]) + end + end + + # Compute primal. + x = vec(vnvals) + logp = f(x) + + # Call ForwardDiff's AD directly. + grad_FWAD = sort(ForwardDiff.gradient(f, x)) + + # Compare with `logdensity_and_gradient`. + z = vi[SampleFromPrior()] + for chunksize in (0, 1, 10), standardtag in (true, false, 0, 3) + ℓ = LogDensityProblemsAD.ADgradient( + Turing.AutoForwardDiff(; chunksize=chunksize, tag=standardtag), + Turing.LogDensityFunction(vi, model, SampleFromPrior(), DynamicPPL.DefaultContext()), + ) + l, ∇E = LogDensityProblems.logdensity_and_gradient(ℓ, z) + + # Compare result + @test l ≈ logp + @test sort(∇E) ≈ grad_FWAD atol = 1e-9 + end +end + @testset "ad.jl" begin - @turing_testset "adr" begin + @testset "adr" begin ad_test_f = gdemo_default vi = Turing.VarInfo(ad_test_f) ad_test_f(vi, SampleFromPrior()) @@ -50,7 +104,8 @@ ∇E2 = LogDensityProblems.logdensity_and_gradient(zygoteℓ, x)[2] @test sort(∇E2) ≈ grad_FWAD atol = 1e-9 end - @turing_testset "general AD tests" begin + + @testset "general AD tests" begin # Tests gdemo gradient. 
function logp1(x::Vector) dist_s = InverseGamma(2, 3) @@ -179,3 +234,5 @@ @test ℓ_grad == ℓ_grad_compiled end end + +end diff --git a/test/essential/container.jl b/test/essential/container.jl index 635d684b4..90e56f46e 100644 --- a/test/essential/container.jl +++ b/test/essential/container.jl @@ -1,3 +1,11 @@ +module ContainerTests + +import AdvancedPS +using Distributions: Bernoulli, Beta, Gamma, Normal +using DynamicPPL: @model, Sampler +using Test: @test, @testset +using Turing + @testset "container.jl" begin @model function test() a ~ Normal(0, 1) @@ -9,7 +17,7 @@ x end - @turing_testset "constructor" begin + @testset "constructor" begin vi = DynamicPPL.VarInfo() sampler = Sampler(PG(10)) model = test() @@ -29,7 +37,7 @@ @test DynamicPPL.get_num_produce(newtrace.model.f.varinfo) == 1 end - @turing_testset "fork" begin + @testset "fork" begin @model function normal() a ~ Normal(0, 1) 3 ~ Normal(a, 2) @@ -48,3 +56,5 @@ @test AdvancedPS.advance!(trace) ≈ AdvancedPS.advance!(newtrace) end end + +end diff --git a/test/experimental/gibbs.jl b/test/experimental/gibbs.jl index 1fb27e705..3f7147e66 100644 --- a/test/experimental/gibbs.jl +++ b/test/experimental/gibbs.jl @@ -1,4 +1,12 @@ -using Test, Random, Turing, DynamicPPL +module ExperimentalGibbsTests + +using ..Models: MoGtest_default, MoGtest_default_z_vector, gdemo +using ..NumericalTests: check_MoGtest_default, check_MoGtest_default_z_vector, check_gdemo, + check_numerical +using DynamicPPL +using Random +using Test +using Turing function check_transition_varnames( transition::Turing.Inference.Transition, @@ -187,3 +195,5 @@ end check_MoGtest_default_z_vector(chain, atol = 0.2) end end + +end diff --git a/test/ext/OptimInterface.jl b/test/ext/OptimInterface.jl index 0f888f6f6..7bb72f3cc 100644 --- a/test/ext/OptimInterface.jl +++ b/test/ext/OptimInterface.jl @@ -1,4 +1,16 @@ -@numerical_testset "TuringOptimExt" begin +module OptimInterfaceTests + +using ..Models: gdemo_default +using Distributions.FillArrays: 
Zeros +using LinearAlgebra: I +import Optim +import Random +import StatsBase +using StatsBase: coef, coefnames, coeftable, informationmatrix, stderror, vcov +using Test: @test, @testset +using Turing + +@testset "TuringOptimExt" begin @testset "MLE" begin Random.seed!(222) true_value = [0.0625, 1.75] @@ -177,3 +189,5 @@ @test result.values[:y] ≈ 100 atol=1e-1 end end + +end diff --git a/test/ext/dynamichmc.jl b/test/ext/dynamichmc.jl index 61027a196..d525512ca 100644 --- a/test/ext/dynamichmc.jl +++ b/test/ext/dynamichmc.jl @@ -1,5 +1,16 @@ +module DynamicHMCTests + +using ..Models: gdemo_default +using ..NumericalTests: check_gdemo +using Test: @test, @testset +using Distributions: sample +import DynamicHMC +import DynamicPPL +using DynamicPPL: Sampler +import Random +using Turing + @testset "TuringDynamicHMCExt" begin - import DynamicHMC Random.seed!(100) @test DynamicPPL.alg_str(Sampler(externalsampler(DynamicHMC.NUTS()))) == "DynamicNUTS" @@ -8,3 +19,5 @@ chn = sample(gdemo_default, spl, 10_000) check_gdemo(chn) end + +end diff --git a/test/mcmc/Inference.jl b/test/mcmc/Inference.jl index e1d0f9e96..f7601b2e1 100644 --- a/test/mcmc/Inference.jl +++ b/test/mcmc/Inference.jl @@ -1,3 +1,19 @@ +module InferenceTests + +using ..Models: gdemo_d, gdemo_default +using ..NumericalTests: check_gdemo, check_numerical +using Distributions: Bernoulli, Beta, InverseGamma, Normal +using Distributions: sample +import DynamicPPL +using DynamicPPL: Sampler, getlogp +import ForwardDiff +using LinearAlgebra: I +import MCMCChains +import Random +import ReverseDiff +using Test: @test, @test_throws, @testset +using Turing + @testset "Testing inference.jl with $adbackend" for adbackend in (AutoForwardDiff(; chunksize=0), AutoReverseDiff(false)) # Only test threading if 1.3+. 
if VERSION > v"1.2" @@ -544,3 +560,5 @@ @test all(xs[:, 2] .=== [missing, 2, 4]) end end + +end diff --git a/test/mcmc/abstractmcmc.jl b/test/mcmc/abstractmcmc.jl index 378b22fba..57554c0f2 100644 --- a/test/mcmc/abstractmcmc.jl +++ b/test/mcmc/abstractmcmc.jl @@ -1,3 +1,18 @@ +module AbstractMCMCTests + +import AdvancedMH +using Distributions: sample +using Distributions.FillArrays: Zeros +import DynamicPPL +import ForwardDiff +using LinearAlgebra: I +import LogDensityProblems +import LogDensityProblemsAD +import Random +import ReverseDiff +using StableRNGs: StableRNG +using Test: @test, @test_throws, @testset +using Turing using Turing.Inference: AdvancedHMC function initialize_nuts(model::Turing.Model) @@ -83,7 +98,7 @@ function test_initial_params(model, sampler, initial_params=DynamicPPL.VarInfo(m end @testset "External samplers" begin - @turing_testset "AdvancedHMC.jl" begin + @testset "AdvancedHMC.jl" begin # Try a few different AD backends. @testset "adtype=$adtype" for adtype in [AutoForwardDiff(), AutoReverseDiff()] @testset "$(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS @@ -135,7 +150,7 @@ end end end - @turing_testset "AdvancedMH.jl" begin + @testset "AdvancedMH.jl" begin @testset "RWMH" begin @testset "$(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS # Need some functionality to initialize the sampler. 
@@ -181,3 +196,5 @@ end # end end end + +end diff --git a/test/mcmc/emcee.jl b/test/mcmc/emcee.jl index 929506f95..04901775e 100644 --- a/test/mcmc/emcee.jl +++ b/test/mcmc/emcee.jl @@ -1,3 +1,14 @@ +module EmceeTests + +using ..Models: gdemo_default +using ..NumericalTests: check_gdemo +using Distributions: sample +import DynamicPPL +using DynamicPPL: Sampler +import Random +using Test: @test, @test_throws, @testset +using Turing + @testset "emcee.jl" begin @testset "gdemo" begin Random.seed!(9876) @@ -41,3 +52,5 @@ @test chain[:m] == fill(1.0, 1, nwalkers) end end + +end diff --git a/test/mcmc/ess.jl b/test/mcmc/ess.jl index a11068656..86029e43f 100644 --- a/test/mcmc/ess.jl +++ b/test/mcmc/ess.jl @@ -1,3 +1,14 @@ +module ESSTests + +using ..Models: MoGtest, MoGtest_default, gdemo, gdemo_default +using ..NumericalTests: check_MoGtest_default, check_numerical +using Distributions: Normal, sample +import DynamicPPL +using DynamicPPL: Sampler +import Random +using Test: @test, @testset +using Turing + @testset "ESS" begin @model function demo(x) m ~ Normal() @@ -12,7 +23,7 @@ end demodot_default = demodot(1.0) - @turing_testset "ESS constructor" begin + @testset "ESS constructor" begin Random.seed!(0) N = 500 @@ -31,7 +42,7 @@ c5 = sample(gdemo_default, s3, N) end - @numerical_testset "ESS inference" begin + @testset "ESS inference" begin Random.seed!(1) chain = sample(demo_default, ESS(), 5_000) check_numerical(chain, [:m], [0.8], atol = 0.1) @@ -71,3 +82,5 @@ ) end end + +end diff --git a/test/mcmc/gibbs.jl b/test/mcmc/gibbs.jl index ef2299dca..3e096dde1 100644 --- a/test/mcmc/gibbs.jl +++ b/test/mcmc/gibbs.jl @@ -1,5 +1,19 @@ +module GibbsTests + +using ..Models: MoGtest_default, gdemo, gdemo_default +using ..NumericalTests: check_MoGtest_default, check_gdemo, check_numerical +using Distributions: InverseGamma, Normal +using Distributions: sample +import ForwardDiff +import Random +import ReverseDiff +using Test: @test, @testset +using Turing +using Turing: 
Inference +using Turing.RandomMeasures: ChineseRestaurantProcess, DirichletProcess + @testset "Testing gibbs.jl with $adbackend" for adbackend in (AutoForwardDiff(; chunksize=0), AutoReverseDiff(false)) - @turing_testset "gibbs constructor" begin + @testset "gibbs constructor" begin N = 500 s1 = Gibbs(HMC(0.1, 5, :s, :m; adtype=adbackend)) s2 = Gibbs(PG(10, :s, :m)) @@ -30,7 +44,7 @@ # it should return a Chains object @test sample(gdemo_default, g, N) isa MCMCChains.Chains end - @numerical_testset "gibbs inference" begin + @testset "gibbs inference" begin Random.seed!(100) alg = Gibbs(CSMC(15, :s), HMC(0.2, 4, :m; adtype=adbackend)) chain = sample(gdemo(1.5, 2.0), alg, 10_000) @@ -65,7 +79,7 @@ end end - @turing_testset "transitions" begin + @testset "transitions" begin @model function gdemo_copy() s ~ InverseGamma(2, 3) m ~ Normal(0, sqrt(s)) @@ -96,7 +110,7 @@ alg = Gibbs(MH(:s), HMC(0.2, 4, :m; adtype=adbackend)) sample(model, alg, 100; callback = callback) end - @turing_testset "dynamic model" begin + @testset "dynamic model" begin @model function imm(y, alpha, ::Type{M}=Vector{Float64}) where {M} N = length(y) rpm = DirichletProcess(alpha) @@ -122,3 +136,5 @@ sample(model, Gibbs(PG(10, :z), HMC(0.01, 4, :m; adtype=adbackend)), 100) end end + +end diff --git a/test/mcmc/gibbs_conditional.jl b/test/mcmc/gibbs_conditional.jl index d7752da64..2abec68a4 100644 --- a/test/mcmc/gibbs_conditional.jl +++ b/test/mcmc/gibbs_conditional.jl @@ -1,7 +1,23 @@ -@turing_testset "Testing gibbs conditionals.jl with $adbackend" for adbackend in (AutoForwardDiff(; chunksize=0), AutoReverseDiff(false)) +module GibbsConditionalTests + +using ..Models: gdemo, gdemo_default +using ..NumericalTests: check_gdemo, check_numerical +import Clustering +using Distributions: Categorical, InverseGamma, Normal, sample +import ForwardDiff +using LinearAlgebra: Diagonal, I +import Random +import ReverseDiff +using StableRNGs: StableRNG +using StatsBase: counts +import StatsFuns +using Test: 
@test, @testset +using Turing + +@testset "Testing gibbs conditionals.jl with $adbackend" for adbackend in (AutoForwardDiff(; chunksize=0), AutoReverseDiff(false)) Random.seed!(1000); rng = StableRNG(123) - @turing_testset "gdemo" begin + @testset "gdemo" begin # We consider the model # ```math # s ~ InverseGamma(2, 3) @@ -61,7 +77,7 @@ check_gdemo(chain) end - @turing_testset "GMM" begin + @testset "GMM" begin Random.seed!(1000); rng = StableRNG(123) # We consider the model # ```math @@ -145,3 +161,5 @@ end end end + +end diff --git a/test/mcmc/hmc.jl b/test/mcmc/hmc.jl index 755fa4b45..96ad1d0e0 100644 --- a/test/mcmc/hmc.jl +++ b/test/mcmc/hmc.jl @@ -1,7 +1,25 @@ +module HMCTests + +using ..Models: gdemo_default +#using ..Models: gdemo +using ..NumericalTests: check_gdemo, check_numerical +using Distributions: Bernoulli, Beta, Categorical, Dirichlet, Normal, Wishart, sample +import DynamicPPL +using DynamicPPL: Sampler +import ForwardDiff +using HypothesisTests: ApproximateTwoSampleKSTest, pvalue +import ReverseDiff +using LinearAlgebra: I, dot, vec +import Random +using StableRNGs: StableRNG +using StatsFuns: logistic +using Test: @test, @test_logs, @testset +using Turing + @testset "Testing hmc.jl with $adbackend" for adbackend in (AutoForwardDiff(; chunksize=0), AutoReverseDiff(false)) # Set a seed rng = StableRNG(123) - @numerical_testset "constrained bounded" begin + @testset "constrained bounded" begin obs = [0,1,0,1,1,1,1,1,1,1] @model function constrained_test(obs) @@ -20,7 +38,7 @@ check_numerical(chain, [:p], [10/14], atol=0.1) end - @numerical_testset "constrained simplex" begin + @testset "constrained simplex" begin obs12 = [1,2,1,2,2,2,2,2,2,2] @model function constrained_simplex_test(obs12) @@ -40,12 +58,12 @@ check_numerical(chain, ["ps[1]", "ps[2]"], [5/16, 11/16], atol=0.015) end - @numerical_testset "hmc reverse diff" begin + @testset "hmc reverse diff" begin alg = HMC(0.1, 10; adtype=adbackend) res = sample(rng, gdemo_default, alg, 4000) 
check_gdemo(res, rtol=0.1) end - @turing_testset "matrix support" begin + @testset "matrix support" begin @model function hmcmatrixsup() v ~ Wishart(7, [1 0.5; 0.5 1]) end @@ -60,7 +78,7 @@ @test maximum(abs, mean(vs) - (7 * [1 0.5; 0.5 1])) <= 0.5 end - @turing_testset "multivariate support" begin + @testset "multivariate support" begin # Define NN flow function nn(x, b1, w11, w12, w13, bo, wo) h = tanh.([w11 w12 w13]' * x .+ b1) @@ -106,7 +124,7 @@ chain = sample(rng, bnn(ts), HMC(0.1, 5; adtype=adbackend), 10) end - @numerical_testset "hmcda inference" begin + @testset "hmcda inference" begin alg1 = HMCDA(500, 0.8, 0.015; adtype=adbackend) # alg2 = Gibbs(HMCDA(200, 0.8, 0.35, :m; adtype=adbackend), HMC(0.25, 3, :s; adtype=adbackend)) @@ -122,7 +140,7 @@ # @test mean(res2[:m]) ≈ 7/6 atol=0.2 end - @numerical_testset "hmcda+gibbs inference" begin + @testset "hmcda+gibbs inference" begin rng = StableRNG(123) Random.seed!(12345) # particle samplers do not support user-provided `rng` yet alg3 = Gibbs(PG(20, :s), HMCDA(500, 0.8, 0.25, :m; init_ϵ=0.05, adtype=adbackend)) @@ -131,7 +149,7 @@ check_gdemo(res3) end - @turing_testset "hmcda constructor" begin + @testset "hmcda constructor" begin alg = HMCDA(0.8, 0.75; adtype=adbackend) println(alg) sampler = Sampler(alg, gdemo_default) @@ -150,12 +168,12 @@ @test isa(alg, HMCDA) @test isa(sampler, Sampler{<:Turing.Hamiltonian}) end - @numerical_testset "nuts inference" begin + @testset "nuts inference" begin alg = NUTS(1000, 0.8; adtype=adbackend) res = sample(rng, gdemo_default, alg, 6000) check_gdemo(res) end - @turing_testset "nuts constructor" begin + @testset "nuts constructor" begin alg = NUTS(200, 0.65; adtype=adbackend) sampler = Sampler(alg, gdemo_default) @test DynamicPPL.alg_str(sampler) == "NUTS" @@ -168,7 +186,7 @@ sampler = Sampler(alg, gdemo_default) @test DynamicPPL.alg_str(sampler) == "NUTS" end - @turing_testset "check discard" begin + @testset "check discard" begin alg = NUTS(100, 0.8; adtype=adbackend) 
c1 = sample(rng, gdemo_default, alg, 500, discard_adapt=true) @@ -177,7 +195,7 @@ @test size(c1, 1) == 500 @test size(c2, 1) == 500 end - @turing_testset "AHMC resize" begin + @testset "AHMC resize" begin alg1 = Gibbs(PG(10, :m), NUTS(100, 0.65, :s; adtype=adbackend)) alg2 = Gibbs(PG(10, :m), HMC(0.1, 3, :s; adtype=adbackend)) alg3 = Gibbs(PG(10, :m), HMCDA(100, 0.65, 0.3, :s; adtype=adbackend)) @@ -186,7 +204,7 @@ @test sample(rng, gdemo_default, alg3, 300) isa Chains end - @turing_testset "Regression tests" begin + @testset "Regression tests" begin # https://github.com/TuringLang/DynamicPPL.jl/issues/27 @model function mwe1(::Type{T}=Float64) where {T<:Real} m = Matrix{T}(undef, 2, 3) @@ -209,7 +227,7 @@ end # issue #1923 - @turing_testset "reproducibility" begin + @testset "reproducibility" begin alg = NUTS(1000, 0.8; adtype=adbackend) res1 = sample(StableRNG(123), gdemo_default, alg, 1000) res2 = sample(StableRNG(123), gdemo_default, alg, 1000) @@ -217,7 +235,7 @@ @test Array(res1) == Array(res2) == Array(res3) end - @turing_testset "prior" begin + @testset "prior" begin @model function demo_hmc_prior() # NOTE: Used to use `InverseGamma(2, 3)` but this has infinite variance # which means that it's _very_ difficult to find a good tolerance in the test below:) @@ -230,7 +248,7 @@ check_numerical(chain, [:s, :m], [mean(truncated(Normal(3, 1); lower=0)), 0], atol=0.2) end - @turing_testset "warning for difficult init params" begin + @testset "warning for difficult init params" begin attempt = 0 @model function demo_warn_initial_params() x ~ Normal() @@ -250,7 +268,7 @@ # Disable on Julia <1.8 due to https://github.com/TuringLang/Turing.jl/pull/2197. # TODO: Remove this block once https://github.com/JuliaFolds2/BangBang.jl/pull/22 has been released. 
if VERSION ≥ v"1.8" - @turing_testset "(partially) issue: #2095" begin + @testset "(partially) issue: #2095" begin @model function vector_of_dirichlet(::Type{TV}=Vector{Float64}) where {TV} xs = Vector{TV}(undef, 2) xs[1] ~ Dirichlet(ones(5)) @@ -262,7 +280,7 @@ end end - @turing_testset "issue: #2195" begin + @testset "issue: #2195" begin @model function buggy_model() lb ~ Uniform(0, 1) ub ~ Uniform(1.5, 2) @@ -304,3 +322,5 @@ @test pvalue(ApproximateTwoSampleKSTest(vec(results), vec(results_prior))) > 0.01 end end + +end diff --git a/test/mcmc/is.jl b/test/mcmc/is.jl index e0bf80356..b2e26bba7 100644 --- a/test/mcmc/is.jl +++ b/test/mcmc/is.jl @@ -1,4 +1,13 @@ -@turing_testset "is.jl" begin +module ISTests + +using Distributions: Normal, sample +using DynamicPPL: logpdf +import Random +using StatsFuns: logsumexp +using Test: @test, @testset +using Turing + +@testset "is.jl" begin function reference(n) as = Vector{Float64}(undef, n) bs = Vector{Float64}(undef, n) @@ -46,7 +55,7 @@ @test chain.logevidence == ref.logevidence end - @turing_testset "logevidence" begin + @testset "logevidence" begin Random.seed!(100) @model function test() @@ -65,3 +74,5 @@ @test chains.logevidence ≈ - 2 * log(2) end end + +end diff --git a/test/mcmc/mh.jl b/test/mcmc/mh.jl index e902aba47..3b15d8069 100644 --- a/test/mcmc/mh.jl +++ b/test/mcmc/mh.jl @@ -1,5 +1,24 @@ +module MHTests + +import AdvancedMH +using Distributions: Bernoulli, Dirichlet, Exponential, InverseGamma, LogNormal, MvNormal, + Normal, sample +import DynamicPPL +using DynamicPPL: Sampler +using LinearAlgebra: I +import Random +using StableRNGs: StableRNG +using Test: @test, @testset +using Turing +using Turing.Inference: Inference + +using ..Models: gdemo_default, MoGtest_default +using ..NumericalTests: check_MoGtest_default, check_gdemo, check_numerical + +GKernel(var) = (x) -> Normal(x, sqrt.(var)) + @testset "mh.jl" begin - @turing_testset "mh constructor" begin + @testset "mh constructor" begin Random.seed!(10) N 
= 500 s1 = MH( @@ -26,7 +45,7 @@ # s6 = externalsampler(MH(gdemo_default, proposal_type=AdvancedMH.StaticProposal)) # c6 = sample(gdemo_default, s6, N) end - @numerical_testset "mh inference" begin + @testset "mh inference" begin Random.seed!(125) alg = MH() chain = sample(gdemo_default, alg, 10_000) @@ -57,7 +76,7 @@ end # Test MH shape passing. - @turing_testset "shape" begin + @testset "shape" begin @model function M(mu, sigma, observable) z ~ MvNormal(mu, sigma) @@ -94,7 +113,7 @@ @test chain isa MCMCChains.Chains end - @turing_testset "proposal matrix" begin + @testset "proposal matrix" begin Random.seed!(100) mat = [1.0 -0.05; -0.05 1.0] @@ -117,7 +136,7 @@ check_gdemo(chain2) end - @turing_testset "gibbs MH proposal matrix" begin + @testset "gibbs MH proposal matrix" begin # https://github.com/TuringLang/Turing.jl/issues/1556 # generate data @@ -167,7 +186,7 @@ # Disable on Julia <1.8 due to https://github.com/TuringLang/Turing.jl/pull/2197. # TODO: Remove this block once https://github.com/JuliaFolds2/BangBang.jl/pull/22 has been released. if VERSION ≥ v"1.8" - @turing_testset "vector of multivariate distributions" begin + @testset "vector of multivariate distributions" begin @model function test(k) T = Vector{Vector{Float64}}(undef, k) for i in 1:k @@ -189,7 +208,7 @@ end end - @turing_testset "MH link/invlink" begin + @testset "MH link/invlink" begin vi_base = DynamicPPL.VarInfo(gdemo_default) # Don't link when no proposals are given since we're using priors @@ -229,7 +248,7 @@ @test !DynamicPPL.islinked(vi, spl) end - @turing_testset "prior" begin + @testset "prior" begin # HACK: MH can be so bad for this prior model for some reason that it's difficult to # find a non-trivial `atol` where the tests will pass for all seeds. 
Hence we fix it :/ rng = StableRNG(10) @@ -241,7 +260,7 @@ check_numerical(chain, [:s, :m], [mean(InverseGamma(2, 3)), 0], atol=0.3) end - @turing_testset "`filldist` proposal (issue #2180)" begin + @testset "`filldist` proposal (issue #2180)" begin @model demo_filldist_issue2180() = x ~ MvNormal(zeros(3), I) chain = sample( demo_filldist_issue2180(), @@ -251,3 +270,5 @@ check_numerical(chain, [Symbol("x[1]"), Symbol("x[2]"), Symbol("x[3]")], [0, 0, 0], atol=0.2) end end + +end diff --git a/test/mcmc/particle_mcmc.jl b/test/mcmc/particle_mcmc.jl index de6e65e40..01510f2a8 100644 --- a/test/mcmc/particle_mcmc.jl +++ b/test/mcmc/particle_mcmc.jl @@ -1,5 +1,16 @@ +module ParticleMCMCTests + +using ..Models: gdemo_default +#using ..Models: MoGtest, MoGtest_default +using AdvancedPS: ResampleWithESSThreshold, resample_systematic, resample_multinomial +using Distributions: Bernoulli, Beta, Gamma, Normal, sample +using DynamicPPL: getspace +import Random +using Test: @test, @test_throws, @testset +using Turing + @testset "SMC" begin - @turing_testset "constructor" begin + @testset "constructor" begin s = SMC() @test s.resampler == ResampleWithESSThreshold() @test getspace(s) === () @@ -45,7 +56,7 @@ @test getspace(s) === (:x,) end - @turing_testset "models" begin + @testset "models" begin @model function normal() a ~ Normal(4,5) 3 ~ Normal(a,2) @@ -70,7 +81,7 @@ @test_throws ErrorException sample(fail_smc(), SMC(), 100) end - @turing_testset "logevidence" begin + @testset "logevidence" begin Random.seed!(100) @model function test() @@ -91,7 +102,7 @@ end @testset "PG" begin - @turing_testset "constructor" begin + @testset "constructor" begin s = PG(10) @test s.nparticles == 10 @test s.resampler == ResampleWithESSThreshold() @@ -148,7 +159,7 @@ end @test getspace(s) === (:x,) end - @turing_testset "logevidence" begin + @testset "logevidence" begin Random.seed!(100) @model function test() @@ -168,21 +179,21 @@ end end # https://github.com/TuringLang/Turing.jl/issues/1598 - 
@turing_testset "reference particle" begin + @testset "reference particle" begin c = sample(gdemo_default, PG(1), 1_000) @test length(unique(c[:m])) == 1 @test length(unique(c[:s])) == 1 end # https://github.com/TuringLang/Turing.jl/issues/2007 - @turing_testset "keyword arguments not supported" begin + @testset "keyword arguments not supported" begin @model kwarg_demo(; x = 2) = return x @test_throws ErrorException sample(kwarg_demo(), PG(1), 10) end end # @testset "pmmh.jl" begin -# @turing_testset "pmmh constructor" begin +# @testset "pmmh constructor" begin # N = 2000 # s1 = PMMH(N, SMC(10, :s), MH(1,(:m, s -> Normal(s, sqrt(1))))) # s2 = PMMH(N, SMC(10, :s), MH(1, :m)) @@ -218,7 +229,7 @@ end # end # @testset "ipmcmc.jl" begin -# @turing_testset "ipmcmc constructor" begin +# @testset "ipmcmc constructor" begin # Random.seed!(125) # # N = 50 @@ -239,3 +250,4 @@ end # end # end +end diff --git a/test/mcmc/sghmc.jl b/test/mcmc/sghmc.jl index 4405b505a..f65ca1c37 100644 --- a/test/mcmc/sghmc.jl +++ b/test/mcmc/sghmc.jl @@ -1,5 +1,17 @@ +module SGHMCTests + +using ..Models: gdemo_default +using ..NumericalTests: check_gdemo +using Distributions: sample +import ForwardDiff +using LinearAlgebra: dot +import ReverseDiff +using StableRNGs: StableRNG +using Test: @test, @testset +using Turing + @testset "Testing sghmc.jl with $adbackend" for adbackend in (AutoForwardDiff(; chunksize=0), AutoReverseDiff(false)) - @turing_testset "sghmc constructor" begin + @testset "sghmc constructor" begin alg = SGHMC(; learning_rate=0.01, momentum_decay=0.1, adtype=adbackend) @test alg isa SGHMC sampler = Turing.Sampler(alg) @@ -15,7 +27,7 @@ sampler = Turing.Sampler(alg) @test sampler isa Turing.Sampler{<:SGHMC} end - @numerical_testset "sghmc inference" begin + @testset "sghmc inference" begin rng = StableRNG(123) alg = SGHMC(; learning_rate=0.02, momentum_decay=0.5, adtype=adbackend) @@ -25,7 +37,7 @@ end @testset "Testing sgld.jl with $adbackend" for adbackend in (AutoForwardDiff(; 
chunksize=0), AutoReverseDiff(false)) - @turing_testset "sgld constructor" begin + @testset "sgld constructor" begin alg = SGLD(; stepsize=PolynomialStepsize(0.25), adtype=adbackend) @test alg isa SGLD sampler = Turing.Sampler(alg) @@ -41,7 +53,7 @@ end sampler = Turing.Sampler(alg) @test sampler isa Turing.Sampler{<:SGLD} end - @numerical_testset "sgld inference" begin + @testset "sgld inference" begin rng = StableRNG(1) chain = sample(rng, gdemo_default, SGLD(; stepsize = PolynomialStepsize(0.5)), 20_000) @@ -55,3 +67,5 @@ end @test m_weighted ≈ 7/6 atol=0.2 end end + +end diff --git a/test/mcmc/utilities.jl b/test/mcmc/utilities.jl index 3a3517f60..2b5e04b2d 100644 --- a/test/mcmc/utilities.jl +++ b/test/mcmc/utilities.jl @@ -1,3 +1,13 @@ +module MCMCUtilitiesTests + +using ..Models: gdemo_default +using Distributions: Normal, sample, truncated +using LinearAlgebra: I, vec +import Random +using Random: MersenneTwister +using Test: @test, @testset +using Turing + @testset "predict" begin Random.seed!(100) @@ -138,3 +148,5 @@ end @test chain.info.stop_time isa Float64 @test chain.info.start_time ≤ chain.info.stop_time end + +end diff --git a/test/optimisation/Optimisation.jl b/test/optimisation/Optimisation.jl index 5f38b6ff1..d1fbeb95e 100644 --- a/test/optimisation/Optimisation.jl +++ b/test/optimisation/Optimisation.jl @@ -1,3 +1,21 @@ +module OptimisationTests + +using ..Models: gdemo, gdemo_default +using Distributions +using Distributions.FillArrays: Zeros +import DynamicPPL +using LinearAlgebra: I +import Random +using Optimization +using Optimization: Optimization +using OptimizationBBO: OptimizationBBO +using OptimizationNLopt: OptimizationNLopt +using OptimizationOptimJL: OptimizationOptimJL +import StatsBase +using StatsBase: coef, coefnames, coeftable, informationmatrix, stderror, vcov +using Test: @test, @testset, @test_throws +using Turing + @testset "Optimisation" begin # The `stats` field is populated only in newer versions of OptimizationOptimJL 
and @@ -79,7 +97,7 @@ end end - @numerical_testset "gdemo" begin + @testset "gdemo" begin """ check_success(result, true_value, true_logp, check_retcode=true) @@ -552,3 +570,5 @@ @test result.values[:y] ≈ 100 atol = 1e-1 end end + +end diff --git a/test/runtests.jl b/test/runtests.jl index 70bd532c1..f4f0ca9b3 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,64 +1,34 @@ -using AbstractMCMC -using AdvancedMH -using AdvancedPS -using Clustering -using Distributions -using Distributions.FillArrays -using DistributionsAD -using FiniteDifferences -using ForwardDiff -using MCMCChains -using NamedArrays -using Optim: Optim -using Optimization: Optimization -using OptimizationBBO: OptimizationBBO -using OptimizationOptimJL: OptimizationOptimJL -using OptimizationNLopt: OptimizationNLopt -using PDMats -using ReverseDiff -using SpecialFunctions -using StatsBase -using StatsFuns -using HypothesisTests -using Tracker -using Turing -using Turing.Inference -using Turing.RandomMeasures -using Zygote - -using LinearAlgebra +include("test_utils/SelectiveTests.jl") +using .SelectiveTests: isincluded, parse_args using Pkg -using Random using Test -using StableRNGs - -using AdvancedPS: ResampleWithESSThreshold, resample_systematic, resample_multinomial -using AdvancedVI: TruncatedADAGrad, DecayedADAGrad, apply! 
-using Distributions: Binomial, logpdf -using DynamicPPL: getval, getlogp -using ForwardDiff: Dual -using MCMCChains: Chains -using StatsFuns: binomlogpdf, logistic, logsumexp using TimerOutputs: TimerOutputs, @timeit -using Turing: BinomialLogit, Sampler, SampleFromPrior, NUTS, - Variational, getspace -using Turing.Essential: TuringDenseMvNormal, TuringDiagMvNormal -using Turing.Variational: TruncatedADAGrad, DecayedADAGrad, AdvancedVI +import Turing -import LogDensityProblems -import LogDensityProblemsAD +include(pkgdir(Turing) * "/test/test_utils/models.jl") +include(pkgdir(Turing) * "/test/test_utils/numerical_tests.jl") -setprogress!(false) +Turing.setprogress!(false) -include(pkgdir(Turing)*"/test/test_utils/AllUtils.jl") +included_paths, excluded_paths = parse_args(ARGS) -# Collect timing and allocations information to show in a clear way. +# Filter which tests to run and collect timing and allocations information to show in a +# clear way. const TIMEROUTPUT = TimerOutputs.TimerOutput() -macro timeit_include(path::AbstractString) :(@timeit TIMEROUTPUT $path include($path)) end +macro timeit_include(path::AbstractString) + return quote + if isincluded($path, included_paths, excluded_paths) + @timeit TIMEROUTPUT $path include($path) + else + println("Skipping tests in $($path)") + end + end +end @testset "Turing" begin @testset "essential" begin @timeit_include("essential/ad.jl") + @timeit_include("essential/container.jl") end @testset "samplers (without AD)" begin diff --git a/test/stdlib/RandomMeasures.jl b/test/stdlib/RandomMeasures.jl index b56504451..e2be8fb2d 100644 --- a/test/stdlib/RandomMeasures.jl +++ b/test/stdlib/RandomMeasures.jl @@ -1,3 +1,11 @@ +module RandomMeasuresTests + +using Distributions: Normal, sample +import Random +using Test: @test, @testset +using Turing +using Turing.RandomMeasures: ChineseRestaurantProcess, DirichletProcess + @testset "RandomMeasures.jl" begin @testset "Infinite Mixture Model" begin @model function infiniteGMM(x) 
@@ -72,7 +80,7 @@ # [[1], [2], [3, 4]], # [[1], [2], [3], [4]]] - # @turing_testset "chinese restaurant processes" begin + # @testset "chinese restaurant processes" begin # # Data # data = [-2,2,-1.5,1.5] @@ -146,7 +154,7 @@ # @test discr < 0.2 # end # @testset "distributions" begin - # @turing_testset "Representations" begin + # @testset "Representations" begin # d = StickBreakingProcess(DirichletProcess(1.0)) # @test minimum(d) == 0 # @test maximum(d) == 1 @@ -159,7 +167,7 @@ # @test minimum(d) == 1 # @test maximum(d) == 3 # end - # @turing_testset "Dirichlet Process" begin + # @testset "Dirichlet Process" begin # α = 0.1 # N = 10_000 @@ -187,7 +195,7 @@ # @test p[2] ≈ q[2] atol=0.1 # @test p[3] ≈ q[3] atol=0.1 # end - # @turing_testset "Pitman-Yor Process" begin + # @testset "Pitman-Yor Process" begin # a = 0.5 # θ = 0.1 @@ -218,7 +226,7 @@ # @test p[3] ≈ q[3] atol=0.1 # end # end - # @turing_testset "stick breaking" begin + # @testset "stick breaking" begin # # Data # data = [-2,2,-1.5,1.5] @@ -304,7 +312,7 @@ # @test l2 < 0.1 # @test discr < 0.3 # end - # @turing_testset "size-based sampling" begin + # @testset "size-based sampling" begin # # Data # data = [-2,2,-1.5,1.5] @@ -381,3 +389,5 @@ # @test discr < 0.2 # end end + +end diff --git a/test/stdlib/distributions.jl b/test/stdlib/distributions.jl index 9024bba9b..f1ce701da 100644 --- a/test/stdlib/distributions.jl +++ b/test/stdlib/distributions.jl @@ -1,6 +1,17 @@ +module DistributionsTests + +using ..NumericalTests: check_dist_numerical +using Distributions +using LinearAlgebra: I +import Random +using StableRNGs: StableRNG +using StatsFuns: logistic +using Test: @testset, @test +using Turing + @testset "distributions.jl" begin rng = StableRNG(12345) - @turing_testset "distributions functions" begin + @testset "distributions functions" begin ns = 10 logitp = randn(rng) d1 = BinomialLogit(ns, logitp) @@ -9,7 +20,7 @@ @test logpdf(d1, k) ≈ logpdf(d2, k) end - @turing_testset "distributions functions" begin 
+ @testset "distributions functions" begin d = OrderedLogistic(-2, [-1, 1]) n = 1_000_000 @@ -21,7 +32,7 @@ @test all(((x, y),) -> abs(x - y) < 0.001, zip(p, pmf)) end - @turing_testset "distributions functions" begin + @testset "distributions functions" begin λ = .01:.01:5 LLp = @. logpdf(Poisson(λ),1) LLlp = @. logpdf(LogPoisson(log(λ)),1) @@ -29,7 +40,7 @@ end - @numerical_testset "single distribution correctness" begin + @testset "single distribution correctness" begin rng = StableRNG(1) n_samples = 10_000 @@ -99,7 +110,7 @@ InverseWishart(7, [1.0 0.5; 0.5 1.0]), ] - @numerical_testset "Correctness test for single distributions" begin + @testset "Correctness test for single distributions" begin for (dist_set, dist_list) ∈ [ ("UnivariateDistribution", dist_uni), ("MultivariateDistribution", dist_multi), @@ -127,3 +138,5 @@ end end end + +end diff --git a/test/test_utils/AllUtils.jl b/test/test_utils/AllUtils.jl deleted file mode 100644 index 4ca57d838..000000000 --- a/test/test_utils/AllUtils.jl +++ /dev/null @@ -1,7 +0,0 @@ -# Import utility functions and reused models. -include("staging.jl") -include("numerical_tests.jl") -include("ad_utils.jl") -include("models.jl") -include("random_measure_utils.jl") -include("testing_functions.jl") diff --git a/test/test_utils/SelectiveTests.jl b/test/test_utils/SelectiveTests.jl new file mode 100644 index 000000000..d4a20bc91 --- /dev/null +++ b/test/test_utils/SelectiveTests.jl @@ -0,0 +1,59 @@ +module SelectiveTests + +""" + parse_args(args) + +Parse the command line arguments to get the included and excluded test file paths. + +The arguments are expected to be in the form: +``` +a b c --skip d e f +``` +where a test file is to be included if and only if +1) the argument list is empty, in which case all files are included, +or +2) + a) it has as a substring of its path any of the strings `a`, `b`, or `c`, + and + b) it does not have as a substring of its path any of the strings `d`, `e`, or `f`. 
+ +The substring checks are done case-insensitively. +""" +function parse_args(args) + included_paths = Vector{String}() + excluded_paths = Vector{String}() + for (i, arg) in enumerate(args) + if arg == "--skip" + append!(excluded_paths, args[i+1:end]) + break + else + push!(included_paths, arg) + end + end + return included_paths, excluded_paths +end + +""" + isincluded(filepath, included_paths, excluded_paths) + +Check if a file should be included in the tests. + +`included_paths` and `excluded_paths` are the output of [`parse_args`](@ref). + +See [`parse_args`](@ref) for the logic of when a file should be included. +""" +function isincluded( + filepath::AbstractString, + included_paths::Vector{<:AbstractString}, + excluded_paths::Vector{<:AbstractString}, +) + if any(excl -> occursin(lowercase(excl), lowercase(filepath)), excluded_paths) + return false + end + if any(incl -> occursin(lowercase(incl), lowercase(filepath)), included_paths) + return true + end + return isempty(included_paths) +end + +end diff --git a/test/test_utils/ad_utils.jl b/test/test_utils/ad_utils.jl deleted file mode 100644 index e48a46ae2..000000000 --- a/test/test_utils/ad_utils.jl +++ /dev/null @@ -1,106 +0,0 @@ -""" - test_reverse_mode_ad(forward, f, ȳ, x...; rtol=1e-6, atol=1e-6) - -Check that the reverse-mode sensitivities produced by an AD library are correct for `f` -at `x...`, given sensitivity `ȳ` w.r.t. `y = f(x...)` up to `rtol` and `atol`. -""" -function test_reverse_mode_ad(f, ȳ, x...; rtol=1e-6, atol=1e-6) - # Perform a regular forwards-pass. - y = f(x...) - - # Use Tracker to compute reverse-mode sensitivities. - y_tracker, back_tracker = Tracker.forward(f, x...) - x̄s_tracker = back_tracker(ȳ) - - # Use Zygote to compute reverse-mode sensitivities. - y_zygote, back_zygote = Zygote.pullback(f, x...) - x̄s_zygote = back_zygote(ȳ) - - test_rd = length(x) == 1 && y isa Number - if test_rd - # Use ReverseDiff to compute reverse-mode sensitivities. 
- if x[1] isa Array - x̄s_rd = similar(x[1]) - tp = ReverseDiff.GradientTape(x -> f(x), x[1]) - ReverseDiff.gradient!(x̄s_rd, tp, x[1]) - x̄s_rd .*= ȳ - y_rd = ReverseDiff.value(tp.output) - @assert y_rd isa Number - else - x̄s_rd = [x[1]] - tp = ReverseDiff.GradientTape(x -> f(x[1]), [x[1]]) - ReverseDiff.gradient!(x̄s_rd, tp, [x[1]]) - y_rd = ReverseDiff.value(tp.output)[1] - x̄s_rd = x̄s_rd[1] * ȳ - @assert y_rd isa Number - end - end - - # Use finite differencing to compute reverse-mode sensitivities. - x̄s_fdm = FDM.j′vp(central_fdm(5, 1), f, ȳ, x...) - - # Check that Tracker forwards-pass produces the correct answer. - @test isapprox(y, Tracker.data(y_tracker), atol=atol, rtol=rtol) - - # Check that Zygpte forwards-pass produces the correct answer. - @test isapprox(y, y_zygote, atol=atol, rtol=rtol) - - if test_rd - # Check that ReverseDiff forwards-pass produces the correct answer. - @test isapprox(y, y_rd, atol=atol, rtol=rtol) - end - - # Check that Tracker reverse-mode sensitivities are correct. - @test all(zip(x̄s_tracker, x̄s_fdm)) do (x̄_tracker, x̄_fdm) - isapprox(Tracker.data(x̄_tracker), x̄_fdm; atol=atol, rtol=rtol) - end - - # Check that Zygote reverse-mode sensitivities are correct. - @test all(zip(x̄s_zygote, x̄s_fdm)) do (x̄_zygote, x̄_fdm) - isapprox(x̄_zygote, x̄_fdm; atol=atol, rtol=rtol) - end - - if test_rd - # Check that ReverseDiff reverse-mode sensitivities are correct. - @test isapprox(x̄s_rd, x̄s_zygote[1]; atol=atol, rtol=rtol) - end -end - -function test_model_ad(model, f, syms::Vector{Symbol}) - # Set up VI. - vi = Turing.VarInfo(model) - - # Collect symbols. - vnms = Vector(undef, length(syms)) - vnvals = Vector{Float64}() - for i in 1:length(syms) - s = syms[i] - vnms[i] = getfield(vi.metadata, s).vns[1] - - vals = getval(vi, vnms[i]) - for i in eachindex(vals) - push!(vnvals, vals[i]) - end - end - - # Compute primal. - x = vec(vnvals) - logp = f(x) - - # Call ForwardDiff's AD directly. 
- grad_FWAD = sort(ForwardDiff.gradient(f, x)) - - # Compare with `logdensity_and_gradient`. - z = vi[SampleFromPrior()] - for chunksize in (0, 1, 10), standardtag in (true, false, 0, 3) - ℓ = LogDensityProblemsAD.ADgradient( - Turing.AutoForwardDiff(; chunksize=chunksize, tag=standardtag), - Turing.LogDensityFunction(vi, model, SampleFromPrior(), DynamicPPL.DefaultContext()), - ) - l, ∇E = LogDensityProblems.logdensity_and_gradient(ℓ, z) - - # Compare result - @test l ≈ logp - @test sort(∇E) ≈ grad_FWAD atol = 1e-9 - end -end diff --git a/test/test_utils/models.jl b/test/test_utils/models.jl index fc392b050..94790a041 100644 --- a/test/test_utils/models.jl +++ b/test/test_utils/models.jl @@ -1,3 +1,11 @@ +module Models + +export MoGtest, MoGtest_default, MoGtest_default_z_vector, MoGtest_z_vector, gdemo, gdemo_d, + gdemo_default + +using Distributions +using Turing: @model + # The old-gdemo model. @model function gdemo(x, y) s ~ InverseGamma(2, 3) @@ -83,5 +91,4 @@ end MoGtest_default_z_vector = MoGtest_z_vector([1.0 1.0 4.0 4.0]) -# Declare empty model to make the Sampler constructor work. -@model empty_model() = x = 1 +end diff --git a/test/test_utils/numerical_tests.jl b/test/test_utils/numerical_tests.jl index 333a3f14a..cb583b517 100644 --- a/test/test_utils/numerical_tests.jl +++ b/test/test_utils/numerical_tests.jl @@ -1,3 +1,12 @@ +module NumericalTests + +using Distributions +using MCMCChains: namesingroup +using Test: @test, @testset + +export check_MoGtest_default, check_MoGtest_default_z_vector, check_dist_numerical, + check_gdemo, check_numerical + function check_dist_numerical(dist, chn; mean_tol = 0.1, var_atol = 1.0, var_tol = 0.5) @testset "numerical" begin # Extract values. 
@@ -71,3 +80,5 @@ function check_MoGtest_default_z_vector(chain; atol=0.2, rtol=0.0) [1.0, 1.0, 2.0, 2.0, 1.0, 4.0], atol=atol, rtol=rtol) end + +end diff --git a/test/test_utils/random_measure_utils.jl b/test/test_utils/random_measure_utils.jl deleted file mode 100644 index 63c84c11a..000000000 --- a/test/test_utils/random_measure_utils.jl +++ /dev/null @@ -1,34 +0,0 @@ -function compute_log_joint(observations, partition, tau0, tau1, sigma, theta) - n = length(observations) - k = length(partition) - prob = k*log(sigma) + lgamma(theta) + lgamma(theta/sigma + k) - lgamma(theta/sigma) - lgamma(theta + n) - for cluster in partition - prob += lgamma(length(cluster) - sigma) - lgamma(1 - sigma) - prob += compute_log_conditional_observations(observations, cluster, tau0, tau1) - end - prob -end - -function compute_log_conditional_observations(observations, cluster, tau0, tau1) - nl = length(cluster) - prob = (nl/2)*log(tau1) - (nl/2)*log(2*pi) + 0.5*log(tau0) + 0.5*log(tau0+nl) - prob += -tau1/2*(sum(observations)) + 0.5*(tau0*mu_0+tau1*sum(observations[cluster]))^2/(tau0+nl*tau1) - prob -end - -# Test of similarity between distributions -function correct_posterior(empirical_probs, data, partitions, τ0, τ1, σ, θ) - true_log_probs = map(p -> compute_log_joint(data, p, τ0, τ1, σ, θ), partitions) - true_probs = exp.(true_log_probs) - true_probs /= sum(true_probs) - - empirical_probs /= sum(empirical_probs) - - # compare distribitions - # L2 - L2 = sum((empirical_probs - true_probs).^2) - - # Discrepancy - discr = maximum(abs.(empirical_probs - true_probs)) - return L2, discr -end diff --git a/test/test_utils/staging.jl b/test/test_utils/staging.jl deleted file mode 100644 index 15d5853d0..000000000 --- a/test/test_utils/staging.jl +++ /dev/null @@ -1,52 +0,0 @@ -function get_stage() - # Appveyor uses "True" for non-Ubuntu images. 
- if get(ENV, "APPVEYOR", "") == "True" || get(ENV, "APPVEYOR", "") == "true" - return "nonnumeric" - end - - # Handle Travis and Github Actions specially. - if get(ENV, "TRAVIS", "") == "true" || get(ENV, "GITHUB_ACTIONS", "") == "true" - if "STAGE" in keys(ENV) - return ENV["STAGE"] - else - return "all" - end - end - - return "all" -end - -function do_test(stage_str) - stg = get_stage() - - # If the tests are being run by Appveyor, don't run - # any numerical tests. - if stg == "nonnumeric" - if stage_str == "numerical" - return false - else - return true - end - end - - # Otherwise run the regular testing procedure. - if stg == "all" || stg == stage_str - return true - end - - return false -end - -macro stage_testset(stage_string::String, args...) - if do_test(stage_string) - return esc(:(@testset($(args...)))) - end -end - -macro numerical_testset(args...) - esc(:(@stage_testset "numerical" $(args...))) -end - -macro turing_testset(args...) - esc(:(@stage_testset "test" $(args...))) -end diff --git a/test/test_utils/testing_functions.jl b/test/test_utils/testing_functions.jl deleted file mode 100644 index 4f00d7793..000000000 --- a/test/test_utils/testing_functions.jl +++ /dev/null @@ -1,26 +0,0 @@ -GKernel(var) = (x) -> Normal(x, sqrt.(var)) - -function randr(vi::Turing.VarInfo, - vn::Turing.VarName, - dist::Distribution, - spl::Turing.Sampler, - count::Bool = false) - if ~haskey(vi, vn) - r = rand(dist) - Turing.push!(vi, vn, r, dist, spl) - return r - elseif is_flagged(vi, vn, "del") - unset_flag!(vi, vn, "del") - r = rand(dist) - Turing.RandomVariables.setval!(vi, Turing.vectorize(dist, r), vn) - return r - else - if count Turing.checkindex(vn, vi, spl) end - Turing.updategid!(vi, vn, spl) - return vi[vn] - end -end - -function insdelim(c, deli=",") - return reduce((e, res) -> append!(e, [res, deli]), c; init = [])[1:end-1] -end diff --git a/test/variational/advi.jl b/test/variational/advi.jl index 62e5ac400..0f77150c0 100644 --- a/test/variational/advi.jl 
+++ b/test/variational/advi.jl @@ -1,5 +1,19 @@ +module AdvancedVITests + +using ..Models: gdemo_default +using ..NumericalTests: check_gdemo +import AdvancedVI +using AdvancedVI: TruncatedADAGrad, DecayedADAGrad +using Distributions: Dirichlet, Normal +using LinearAlgebra: I +using MCMCChains: Chains +import Random +using Test: @test, @testset +using Turing +using Turing.Essential: TuringDiagMvNormal + @testset "advi.jl" begin - @turing_testset "advi constructor" begin + @testset "advi constructor" begin Random.seed!(0) N = 500 @@ -7,7 +21,7 @@ q = vi(gdemo_default, s1) c1 = rand(q, N) end - @numerical_testset "advi inference" begin + @testset "advi inference" begin @testset for opt in [TruncatedADAGrad(), DecayedADAGrad()] Random.seed!(1) N = 500 @@ -22,7 +36,7 @@ end end - @turing_testset "advi different interfaces" begin + @testset "advi different interfaces" begin Random.seed!(1234) target = MvNormal(zeros(2), I) @@ -50,7 +64,7 @@ # regression test for: # https://github.com/TuringLang/Turing.jl/issues/2065 - @turing_testset "simplex bijector" begin + @testset "simplex bijector" begin @model function dirichlet() x ~ Dirichlet([1.0,1.0]) return x @@ -72,7 +86,7 @@ end # Ref: https://github.com/TuringLang/Turing.jl/issues/2205 - @turing_testset "with `condition` (issue #2205)" begin + @testset "with `condition` (issue #2205)" begin @model function demo_issue2205() x ~ Normal() y ~ Normal(x, 1) @@ -91,3 +105,5 @@ @test var_est ≈ var_true atol=0.2 end end + +end diff --git a/test/variational/optimisers.jl b/test/variational/optimisers.jl index 46a81aa0d..8063cdf2e 100644 --- a/test/variational/optimisers.jl +++ b/test/variational/optimisers.jl @@ -1,3 +1,11 @@ +module VariationalOptimisersTests + +using AdvancedVI: DecayedADAGrad, TruncatedADAGrad, apply! 
+import ForwardDiff +import ReverseDiff +using Test: @test, @testset +using Turing + function test_opt(ADPack, opt) θ = randn(10, 10) θ_fit = randn(10, 10) @@ -17,3 +25,5 @@ end for opt in [TruncatedADAGrad(), DecayedADAGrad(1e-2)] test_opt(ReverseDiff, opt) end + +end