Skip to content

Commit

Permalink
CompatHelper: bump compat for Bijectors to 0.13, (keep existing compat) (#2018)
Browse files Browse the repository at this point in the history

* CompatHelper: bump compat for Bijectors to 0.13, (keep existing compat)

* Update Project.toml

* Replacement for #2039 (#2040)

* Fix testset for external samplers

* Update abstractmcmc.jl

* Update test/contrib/inference/abstractmcmc.jl

Co-authored-by: Tor Erlend Fjelde <[email protected]>

* Update test/contrib/inference/abstractmcmc.jl

Co-authored-by: Tor Erlend Fjelde <[email protected]>

* Transfer some test utility function into DynamicPPL (#2049)

* Update OptimInterface.jl

* Only run optimisation tests in numerical stage.

* fix function lookup after moving functions

---------

Co-authored-by: Xianda Sun <[email protected]>

* Remove tracker tests.

* Update Project.toml

* Update Project.toml

* Update Project.toml

---------

Co-authored-by: CompatHelper Julia <[email protected]>
Co-authored-by: Hong Ge <[email protected]>
Co-authored-by: haris organtzidis <[email protected]>
Co-authored-by: Tor Erlend Fjelde <[email protected]>
Co-authored-by: Xianda Sun <[email protected]>
Co-authored-by: Cameron Pfiffer <[email protected]>
  • Loading branch information
7 people authored Aug 16, 2023
1 parent 1b67694 commit d8beaf0
Show file tree
Hide file tree
Showing 4 changed files with 10 additions and 49 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ AdvancedMH = "0.6.8, 0.7"
AdvancedPS = "0.4"
AdvancedVI = "0.2"
BangBang = "0.3"
Bijectors = "0.12"
Bijectors = "0.13.6"
DataStructures = "0.18"
Distributions = "0.23.3, 0.24, 0.25"
DistributionsAD = "0.6"
Expand Down
10 changes: 6 additions & 4 deletions test/contrib/inference/abstractmcmc.jl
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ function initialize_mh(model)
end

@testset "External samplers" begin
@testset "AdvancedHMC.jl" begin
@turing_testset "AdvancedHMC.jl" begin
for model in DynamicPPL.TestUtils.DEMO_MODELS
# Need some functionality to initialize the sampler.
# TODO: Remove this once the constructors in the respective packages become "lazy".
Expand All @@ -52,12 +52,13 @@ end
5_000;
n_adapts=1_000,
discard_initial=1_000,
rtol=0.2
rtol=0.2,
sampler_name="AdvancedHMC"
)
end
end

@testset "AdvancedMH.jl" begin
@turing_testset "AdvancedMH.jl" begin
for model in DynamicPPL.TestUtils.DEMO_MODELS
# Need some functionality to initialize the sampler.
# TODO: Remove this once the constructors in the respective packages become "lazy".
Expand All @@ -68,7 +69,8 @@ end
10_000;
discard_initial=1_000,
thinning=10,
rtol=0.2
rtol=0.2,
sampler_name="AdvancedMH"
)
end
end
Expand Down
6 changes: 0 additions & 6 deletions test/essential/ad.jl
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,6 @@
@model function dir()
theta ~ Dirichlet(1 ./ fill(4, 4))
end
Turing.setadbackend(:tracker)
sample(dir(), HMC(0.01, 1), 1000);
Turing.setadbackend(:zygote)
sample(dir(), HMC(0.01, 1), 1000)
Turing.setadbackend(:reversediff)
Expand All @@ -99,8 +97,6 @@
@model function wishart()
theta ~ Wishart(4, Matrix{Float64}(I, 4, 4))
end
Turing.setadbackend(:tracker)
sample(wishart(), HMC(0.01, 1), 1000);
Turing.setadbackend(:reversediff)
sample(wishart(), HMC(0.01, 1), 1000);
Turing.setadbackend(:zygote)
Expand All @@ -109,8 +105,6 @@
@model function invwishart()
theta ~ InverseWishart(4, Matrix{Float64}(I, 4, 4))
end
Turing.setadbackend(:tracker)
sample(invwishart(), HMC(0.01, 1), 1000);
Turing.setadbackend(:reversediff)
sample(invwishart(), HMC(0.01, 1), 1000);
Turing.setadbackend(:zygote)
Expand Down
41 changes: 3 additions & 38 deletions test/modes/OptimInterface.jl
Original file line number Diff line number Diff line change
@@ -1,38 +1,3 @@
# TODO: Remove these once the equivalent is present in `DynamicPPL.TestUtils.
function likelihood_optima(::DynamicPPL.TestUtils.UnivariateAssumeDemoModels)
return (s=1/16, m=7/4)
end
function posterior_optima(::DynamicPPL.TestUtils.UnivariateAssumeDemoModels)
# TODO: Figure out exact for `s`.
return (s=0.907407, m=7/6)
end

function likelihood_optima(model::DynamicPPL.TestUtils.MultivariateAssumeDemoModels)
# Get some containers to fill.
vals = Random.rand(model)

# NOTE: These are "as close to zero as we can get".
vals.s[1] = 1e-32
vals.s[2] = 1e-32

vals.m[1] = 1.5
vals.m[2] = 2.0

return vals
end
function posterior_optima(model::DynamicPPL.TestUtils.MultivariateAssumeDemoModels)
# Get some containers to fill.
vals = Random.rand(model)

# TODO: Figure out exact for `s[1]`.
vals.s[1] = 0.890625
vals.s[2] = 1
vals.m[1] = 3/4
vals.m[2] = 1

return vals
end

# Used for testing how well it works with nested contexts.
struct OverrideContext{C,T1,T2} <: DynamicPPL.AbstractContext
context::C
Expand All @@ -57,7 +22,7 @@ function DynamicPPL.tilde_observe(context::OverrideContext, right, left, vi)
return context.loglikelihood_weight, vi
end

@testset "OptimInterface.jl" begin
@numerical_testset "OptimInterface.jl" begin
@testset "MLE" begin
Random.seed!(222)
true_value = [0.0625, 1.75]
Expand Down Expand Up @@ -157,7 +122,7 @@ end
# FIXME: Some models doesn't work for Tracker and ReverseDiff.
if Turing.Essential.ADBACKEND[] === :forwarddiff
@testset "MAP for $(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS
result_true = posterior_optima(model)
result_true = DynamicPPL.TestUtils.posterior_optima(model)

@testset "$(nameof(typeof(optimizer)))" for optimizer in [LBFGS(), NelderMead()]
result = optimize(model, MAP(), optimizer)
Expand Down Expand Up @@ -188,7 +153,7 @@ end
DynamicPPL.TestUtils.demo_dot_assume_matrix_dot_observe_matrix,
]
@testset "MLE for $(model.f)" for model in DynamicPPL.TestUtils.DEMO_MODELS
result_true = likelihood_optima(model)
result_true = DynamicPPL.TestUtils.likelihood_optima(model)

# `NelderMead` seems to struggle with convergence here, so we exclude it.
@testset "$(nameof(typeof(optimizer)))" for optimizer in [LBFGS(),]
Expand Down

2 comments on commit d8beaf0

@yebai
Copy link
Member

@yebai yebai commented on d8beaf0 Aug 16, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Registration pull request created: JuliaRegistries/General/89794

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.28.2 -m "<description of version>" d8beaf08a7602ef5a97c3e710ce8cbefcc67cd41
git push origin v0.28.2

Please sign in to comment.