Rename Turing.Core to Turing.Essential (#1762)
* Rename `Turing.Core` to `Turing.Essential`

* Deprecate Turing.Core

Co-authored-by: Tor Erlend Fjelde <[email protected]>

devmotion and torfjelde authored Jan 14, 2022
1 parent ac99fb0 commit 9087412
Showing 11 changed files with 31 additions and 30 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,6 +1,6 @@
name = "Turing"
uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
version = "0.19.3"
version = "0.19.4"

[deps]
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
2 changes: 1 addition & 1 deletion docs/src/library/api.md
@@ -10,7 +10,7 @@ CurrentModule = Turing
## Index

```@index
Modules = [Turing, Turing.Core, Turing.Inference, Libtask]
Modules = [Turing, Turing.Essential, Turing.Inference, Libtask]
```

## Modelling
7 changes: 4 additions & 3 deletions src/Turing.jl
@@ -30,8 +30,9 @@ include("stdlib/distributions.jl")
include("stdlib/RandomMeasures.jl")
include("utilities/Utilities.jl")
using .Utilities
include("core/Core.jl")
using .Core
include("essential/Essential.jl")
Base.@deprecate_binding Core Essential false
using .Essential
include("inference/Inference.jl") # inference algorithms
using .Inference
include("variational/VariationalInference.jl")
@@ -140,7 +141,7 @@ export @model, # modelling
MAP,
MLE,
get_parameter_bounds,
optim_objective,
optim_objective,
optim_function,
optim_problem
end
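
The `Base.@deprecate_binding Core Essential false` line above keeps the old name working during the transition: `Turing.Core` becomes a deprecated alias for `Turing.Essential`, and the trailing `false` means the old binding is no longer exported. A minimal sketch of the resulting behaviour (illustrative only; the deprecation warning is printed when Julia is started with `--depwarn=yes`):

```julia
# Illustrative only: the old submodule name still resolves, but is deprecated.
using Turing

Turing.Essential.getADbackend   # preferred, post-rename path

# `Turing.Core` is now a deprecated alias for `Turing.Essential`; with
# `--depwarn=yes` the first access prints a deprecation warning.
Turing.Core === Turing.Essential   # true
```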
2 changes: 1 addition & 1 deletion src/core/Core.jl → src/essential/Essential.jl
@@ -1,4 +1,4 @@
module Core
module Essential

using DistributionsAD, Bijectors
using Libtask, ForwardDiff, Random
File renamed without changes.
File renamed without changes.
File renamed without changes.
2 changes: 1 addition & 1 deletion src/inference/AdvancedSMC.jl
@@ -365,6 +365,6 @@ function AdvancedPS.Trace(
)
newvarinfo = deepcopy(varinfo)
DynamicPPL.reset_num_produce!(newvarinfo)
f = Turing.Core.TracedModel(model, sampler, newvarinfo)
f = Turing.Essential.TracedModel(model, sampler, newvarinfo)
return AdvancedPS.Trace(f)
end
14 changes: 7 additions & 7 deletions src/inference/Inference.jl
@@ -1,8 +1,8 @@
module Inference

using ..Core
using ..Essential
using ..Utilities
using DynamicPPL: Metadata, VarInfo, TypedVarInfo,
using DynamicPPL: Metadata, VarInfo, TypedVarInfo,
islinked, invlink!, link!,
setindex!!, push!!,
setlogp!!, getlogp,
@@ -28,7 +28,7 @@ import AdvancedHMC; const AHMC = AdvancedHMC
import AdvancedMH; const AMH = AdvancedMH
import AdvancedPS
import BangBang
import ..Core: getchunksize, getADbackend
import ..Essential: getchunksize, getADbackend
import EllipticalSliceSampling
import Random
import MCMCChains
@@ -260,7 +260,7 @@ function _params_to_array(ts::Vector)
return Dict(nms[j] => vs[j] for j in 1:length(vs))
end
names = collect(names_set)
vals = [get(dicts[i], key, missing) for i in eachindex(dicts),
vals = [get(dicts[i], key, missing) for i in eachindex(dicts),
(j, key) in enumerate(names)]

return names, vals
@@ -444,7 +444,7 @@ for alg in (:HMC, :HMCDA, :NUTS, :SGLD, :SGHMC)
end

function DynamicPPL.get_matching_type(
spl::Sampler{<:Union{PG, SMC}},
spl::Sampler{<:Union{PG, SMC}},
vi,
::Type{TV},
) where {T, N, TV <: Array{T, N}}
@@ -566,8 +566,8 @@ end
transitions_from_chain(
[rng::AbstractRNG,]
model::Model,
chain::MCMCChains.Chains;
model::Model,
chain::MCMCChains.Chains;
sampler = DynamicPPL.SampleFromPrior()
)
24 changes: 12 additions & 12 deletions test/core/ad.jl → test/essential/ad.jl
@@ -77,11 +77,11 @@
sample(dir(), HMC(0.01, 1), 1000);
Turing.setrdcache(true)
sample(dir(), HMC(0.01, 1), 1000);
caches = Memoization.find_caches(Turing.Core.memoized_taperesult)
caches = Memoization.find_caches(Turing.Essential.memoized_taperesult)
@test length(caches) == 1
@test !isempty(first(values(caches)))
Turing.emptyrdcache()
caches = Memoization.find_caches(Turing.Core.memoized_taperesult)
caches = Memoization.find_caches(Turing.Essential.memoized_taperesult)
@test length(caches) == 1
@test isempty(first(values(caches)))
end
@@ -111,35 +111,35 @@
@model function tst(x, ::Type{TV}=Vector{Float64}) where {TV}
params = TV(undef, 2)
@. params ~ Normal(0, 1)

x ~ MvNormal(params, I)
end

function make_logjoint(model::DynamicPPL.Model, ctx::DynamicPPL.AbstractContext)
# setup
varinfo_init = Turing.VarInfo(model)
spl = DynamicPPL.SampleFromPrior()
spl = DynamicPPL.SampleFromPrior()
DynamicPPL.link!(varinfo_init, spl)

function logπ(z; unlinked = false)
varinfo = DynamicPPL.VarInfo(varinfo_init, spl, z)

unlinked && DynamicPPL.invlink!(varinfo_init, spl)
model(varinfo, spl, ctx)
unlinked && DynamicPPL.link!(varinfo_init, spl)

return -DynamicPPL.getlogp(varinfo)
end

return logπ
end

data = [0.5, -0.5]
model = tst(data)

likelihood = make_logjoint(model, DynamicPPL.LikelihoodContext())
target(x) = likelihood(x, unlinked=true)

H_f = ForwardDiff.hessian(target, zeros(2))
H_r = ReverseDiff.hessian(target, zeros(2))
@test H_f == [1.0 0.0; 0.0 1.0]
8 changes: 4 additions & 4 deletions test/runtests.jl
@@ -34,18 +34,18 @@ using DynamicPPL: getval, getlogp
using ForwardDiff: Dual
using MCMCChains: Chains
using StatsFuns: binomlogpdf, logistic, logsumexp
using Turing: BinomialLogit, ForwardDiffAD, Sampler, SampleFromPrior, NUTS, TrackerAD,
using Turing: BinomialLogit, ForwardDiffAD, Sampler, SampleFromPrior, NUTS, TrackerAD,
Variational, ZygoteAD, getspace, gradient_logp
using Turing.Core: TuringDenseMvNormal, TuringDiagMvNormal
using Turing.Essential: TuringDenseMvNormal, TuringDiagMvNormal
using Turing.Variational: TruncatedADAGrad, DecayedADAGrad, AdvancedVI

setprogress!(false)

include("test_utils/AllUtils.jl")

@testset "Turing" begin
@testset "core" begin
include("core/ad.jl")
@testset "essential" begin
include("essential/ad.jl")
end

@testset "samplers (without AD)" begin
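
Packages and test suites that still reference the old module path can bridge both names while they migrate; a hypothetical compatibility shim (not part of this commit; only `Turing.Essential`, `Turing.Core`, and `TuringDiagMvNormal` come from the diff, the rest is made up):

```julia
using Turing

# Hypothetical shim: pick whichever internals module the installed Turing
# version provides, avoiding the deprecated binding on newer versions.
const TuringInternals = isdefined(Turing, :Essential) ? Turing.Essential : Turing.Core

# For example, one of the types imported in the test file above:
TuringInternals.TuringDiagMvNormal
```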

2 comments on commit 9087412

@devmotion (Member Author)

@JuliaRegistrator

Registration pull request created: JuliaRegistries/General/52366

After the above pull request is merged, it is recommended that a tag be created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or it can be done manually through the GitHub interface, or via:

git tag -a v0.19.4 -m "<description of version>" 9087412bb574bc83eacd9301f7fa5892a839c666
git push origin v0.19.4
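
Once the registration pull request is merged and the tag exists, the release can be installed with standard Pkg commands; a sketch (the version number comes from the Project.toml change above):

```julia
using Pkg
# Assumes the General registry has picked up the 0.19.4 registration above.
Pkg.add(name="Turing", version="0.19.4")
```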
