Commit

Dev (#332)
* update Project.toml

* s/logpdf/logdensity/

* update deps

* scratchpad

* version bump

* representative => rootmeasure

* update dependency versions

* reduce dependencies

* update deps

* scratchpad

* representative => rootmeasure

* update dependency versions

* reduce dependencies

* `as` methods for `xform`

* cleanup

* require latest MeasureTheory

* drop old distributions code

* drop old iid code

* drop extra space

* limit deps to three newest releases

* update dynamichmc

* add Aqua

* bump version

* Better `predict` method

* withmeasures(::ConditionalModel)

* update dependencies

* updating symbolics

* some updates to symbolics

* hmm example

* update MeasureBase bound

* better dispatch for `predict`

* drop redundant method

* remove whitespace

* bump version

* update for upcoming MeasureTheory release

* small change to toposort

* have `iid` use powermeasure

* tests passing!

* iid(n::Integer...)

* update dependencies

* start logdensity with partialstatic

* update test to account for partialstatic

* add `insupport`

* clean up insupport

* speed up model building

* updates

* starting updates for new MeasureTheory stuff

* get tests passing

* Kleisli => TransitionKernel

* bump MB dependency version

* Kleisli => TransitionKernel

* tests passing

* update dependencies

* update CI

* Should only need BayesianLinearRegression for testing

* bump version
cscherrer authored May 24, 2022
1 parent 36bc868 commit 9b057c0
Showing 52 changed files with 318 additions and 253 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/ci.yml
@@ -17,7 +17,8 @@ jobs:
fail-fast: false
matrix:
version:
- '1.5'
- '1.6'
- '1.7'
- '1'
os:
- ubuntu-latest
38 changes: 20 additions & 18 deletions Project.toml
@@ -1,15 +1,16 @@
name = "Soss"
uuid = "8ce77f84-9b61-11e8-39ff-d17a774bf41c"
author = ["Chad Scherrer <[email protected]>"]
version = "0.20.9"
version = "0.21.0"

[deps]
ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
DensityInterface = "b429d917-457f-4dbc-8f4c-0cc954292b1d"
DiffResults = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
GeneralizedGenerated = "6b9d7cbe-bcb9-11e9-073f-15a7a543e2eb"
IRTools = "7869d1d1-7146-5819-86e3-90919afe41df"
IfElse = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173"
JuliaVariables = "b14d175d-62b4-44ba-8fb7-3064adc8c3ec"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MLStyle = "d8e11817-5142-5d16-987a-aa16d5891078"
@@ -30,45 +31,46 @@ SimpleGraphs = "55797a34-41de-5266-9ec1-32ac4eb504d3"
SimplePartitions = "ec83eff0-a5b5-5643-ae32-5cbf6eedec9d"
SimplePosets = "b2aef97b-4721-5af9-b440-0bad754dc5ba"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
Static = "aedffcd0-7271-4cad-89d0-dc628f76c6d3"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c"
SymbolicCodegen = "fc9b0551-4f1b-42e1-8440-e8a535f5a551"
SymbolicUtils = "d1185830-fcd6-423d-90d6-eec64667417b"
TransformVariables = "84d833dd-6860-57f9-a1a7-6da5db126cff"
TupleVectors = "615932cf-77b6-4358-adcd-5b7eba981d7e"

[compat]
ArrayInterface = "3"
ArrayInterface = "5,6"
DensityInterface = "0.4"
DiffResults = "1"
Distributions = "0.23, 0.24, 0.25"
FillArrays = "0.10, 0.11, 0.12"
FillArrays = "0.11, 0.12, 0.13"
GeneralizedGenerated = "0.3"
IRTools = "0.4"
IfElse = "0.1"
JuliaVariables = "0.2"
MLStyle = "0.3,0.4"
MacroTools = "0.5"
MappedArrays = "0.3, 0.4"
MeasureBase = "0.5"
MeasureTheory = "0.13"
NamedTupleTools = "0.12, 0.13"
NestedTuples = "0.3"
RecipesBase = "0.7,0.8, 1"
MeasureBase = "0.9"
MeasureTheory = "0.16"
NamedTupleTools = "0.12, 0.13, 0.14"
NestedTuples = "0.3.9"
RecipesBase = "1"
Reexport = "1"
Requires = "1"
RuntimeGeneratedFunctions = "0.5"
SampleChains = "0.5"
SimpleGraphs = "0.5, 0.6, 0.7"
SimplePartitions = "0.2, 0.3"
SimplePosets = "0.0, 0.1"
SpecialFunctions = "0.9, 0.10, 1"
SimplePosets = "0.1"
SpecialFunctions = "1, 2"
Static = "0.5, 0.6"
StatsBase = "0.33"
StatsFuns = "0.9"
SymbolicCodegen = "0.2"
SymbolicUtils = "0.15, 0.16, 0.17"
TransformVariables = "0.4"
StatsFuns = "0.9, 1"
SymbolicUtils = "0.17, 0.18, 0.19"
TransformVariables = "0.5, 0.6"
TupleVectors = "0.1"
julia = "1.5"
julia = "1.6"

[extras]
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
2 changes: 1 addition & 1 deletion demos/boundmodels.jl
@@ -10,7 +10,7 @@ m(μ=1.0)

rand(m(μ=1.0))

logdensity(m(μ=1.0),(x=0.4,))
logdensityof(m(μ=1.0),(x=0.4,))
# rand(m1)

weightedSample(m(μ=1.0),(x=3,))
2 changes: 1 addition & 1 deletion demos/hmm.jl
@@ -53,7 +53,7 @@ end
end
end

# function Distributions.logdensity(c::Chain, xs)
# function Distributions.logdensityof(c::Chain, xs)

c = Chain(s0, step)

6 changes: 3 additions & 3 deletions demos/repeated-measurements.jmd
@@ -92,8 +92,8 @@ for t in symlogdensity(m()).evalf(3).args

<!--
using BenchmarkTools
@btime logdensity(m(),truth)
@btime logdensity(m(),truth, codegen)
@btime logdensityof(m(),truth)
@btime logdensityof(m(),truth, codegen)

f1 = Soss._codegen(m, true);
f2 = Soss._codegen(m,false);
@@ -105,7 +105,7 @@ codegen(m(),truth)



logdensity(m(), merge(truth, (p_bad=shuffle(truth.p_bad),)), codegen)
logdensityof(m(), merge(truth, (p_bad=shuffle(truth.p_bad),)), codegen)


@time result = dynamicHMC(m(), (y=truth.y,), codegen) ;
6 changes: 3 additions & 3 deletions demos/repeated-measurements.md
@@ -192,8 +192,8 @@ _j1 = 1 _j2 = 1

<!--
using BenchmarkTools
@btime logdensity(m(),truth)
@btime logdensity(m(),truth, codegen)
@btime logdensityof(m(),truth)
@btime logdensityof(m(),truth, codegen)
f1 = Soss._codegen(m, true);
f2 = Soss._codegen(m,false);
@@ -205,7 +205,7 @@ codegen(m(),truth)
logdensity(m(), merge(truth, (p_bad=shuffle(truth.p_bad),)), codegen)
logdensityof(m(), merge(truth, (p_bad=shuffle(truth.p_bad),)), codegen)
@time result = dynamicHMC(m(), (y=truth.y,), codegen) ;
2 changes: 1 addition & 1 deletion docs/src/misc.md
@@ -65,7 +65,7 @@ function sourceWeightedSample(_data)
proc(_m, st :: LineNumber) = nothing

function proc(_m, st :: Sample)
st.x ∈ _datakeys && return :(_ℓ += logdensity($(st.rhs), $(st.x)))
st.x ∈ _datakeys && return :(_ℓ += logdensity_def($(st.rhs), $(st.x)))
return :($(st.x) = rand($(st.rhs)))
end

4 changes: 2 additions & 2 deletions scratchpad/MCM.jl
@@ -7,7 +7,7 @@ function expect(x,ℓ)
end

x = Particles(5000,TDist(3))
ℓ = logpdf(Normal(),x) - logpdf(TDist(3), x)
ℓ = logdensityof(Normal(),x) - logdensityof(TDist(3), x)

expect(x,ℓ)
expect(x^2,ℓ)
@@ -26,7 +26,7 @@ expect(z^4,ℓ)
# And now a new weighted variable

y = Particles(5000,TDist(3))
ℓ += logpdf(Normal(),y) - logpdf(TDist(3), y)
ℓ += logdensityof(Normal(),y) - logdensityof(TDist(3), y)

expect(y,ℓ)
expect(y^2,ℓ)
4 changes: 2 additions & 2 deletions scratchpad/SampleChainsDynamicHMC-demo.jl
@@ -44,8 +44,8 @@ plot(p, exp.([ℓ((p=pj,)) for pj in p]))

using TransformVariables

ℓ(x) = logdensity(Beta(4,3), x.p)
t = xform(post)
ℓ(x) = logdensity_def(Beta(4,3), x.p)
t = as(post)
chain = initialize!(DynamicHMCChain, ℓ, t)
drawsamples!(chain, 10000)
plot(ash(chain.p))
2 changes: 1 addition & 1 deletion scratchpad/advancedhmc.jl
@@ -6,7 +6,7 @@ D = 10; initial_θ = rand(D)
dist = Normal() ^ D

# Define the target distribution
ℓπ(θ) = logdensity(dist, θ)
ℓπ(θ) = logdensity_def(dist, θ)

# Set the number of samples to draw and warmup iterations
n_samples, n_adapts = 2_000, 1_000
4 changes: 2 additions & 2 deletions scratchpad/bouncy.jl
@@ -17,9 +17,9 @@ Returns a `Trace` object.
"""
function bouncy(m::ConditionalModel, T = 1000.0; c=10.0, λref=0.1, ρ=0.0, adapt=false) where {A,B}

ℓ(pars) = logdensity(m, pars)
ℓ(pars) = logdensity_def(m, pars)

t = xform(m)
t = as(m)

function f(x)
(θ, logjac) = transform_and_logjac(t, x)
2 changes: 1 addition & 1 deletion scratchpad/codegen.jl
@@ -8,7 +8,7 @@ m = @model σ begin

x = rand(m(σ=3.0));

sourceSymlogdensity(m(σ=3.0) | (;x))
sourceSymlogdensityof(m(σ=3.0) | (;x))

s = symlogdensity(m(σ=3.0))

8 changes: 4 additions & 4 deletions scratchpad/composition.jl
@@ -33,17 +33,17 @@ dynamicHMC(post)



logdensity(post, truth)
logdensity_def(post, truth)

speed = truth.speed
x = truth.x
v = truth.v

_ℓ = 0.0
_ℓ += logdensity(Normal(0.0, 100.0), speed)
_ℓ += logdensity_def(Normal(0.0, 100.0), speed)
speed = predict(Normal(0.0, 100.0), speed)
_ℓ += logdensity(gps(; speed, n, t), x)
_ℓ += logdensity_def(gps(; speed, n, t), x)
x = predict(gps(; speed, n, t), x)
_ℓ += logdensity(radar(; speed, n), v)
_ℓ += logdensity_def(radar(; speed, n), v)
v = predict(radar(; speed, n), v)
_ℓ
2 changes: 1 addition & 1 deletion scratchpad/composition2.jl
@@ -29,7 +29,7 @@ rand(m())

x = randn(2)

xform(m() | (x=x, μ = (z = 1.0,)))
as(m() | (x=x, μ = (z = 1.0,)))

dynamicHMC(m() | (x=x, μ = (z = 1.0,)))

2 changes: 1 addition & 1 deletion scratchpad/example-linear-regression.jl
@@ -130,7 +130,7 @@ y_true - particles(y_ppc)

# `rand` turns each `v ~ dist` into `v = rand(dist)`, finally outputting the `NamedTuple` of all values it has seen.

# `logdensity` steps through the same program, but instead accumulates a log-density. It begins by initializing `_ℓ = 0.0`. Then at each step, it turns `v ~ dist` into `_ℓ += logdensity(dist, v)`, before finally returning `_ℓ`.
# `logdensity` steps through the same program, but instead accumulates a log-density. It begins by initializing `_ℓ = 0.0`. Then at each step, it turns `v ~ dist` into `_ℓ += logdensity_def(dist, v)`, before finally returning `_ℓ`.

# Note that I said "turns into" instead of "interprets". Soss uses [`GG.jl`](https://github.com/thautwarm/GG.jl) to generate specialized code for a given model, inference primitive (like `rand` and `logdensity`), and type of data.
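
# As a rough, hand-written sketch (an illustration added here, not part of this commit and not
# the code GG.jl actually emits), the unrolled `logdensity` for a hypothetical two-statement
# model amounts to the following; `m_demo` and `logdensity_demo` are made-up names.

using Soss, MeasureTheory

m_demo = @model begin
    μ ~ Normal(0, 1)
    x ~ Normal(μ, 1)
end

# Each `v ~ dist` contributes `_ℓ += logdensity_def(dist, v)`; the real generated function
# is specialized on the model, the inference primitive, and the data types.
function logdensity_demo(μ, x)
    _ℓ = 0.0
    _ℓ += logdensity_def(Normal(0, 1), μ)   # from μ ~ Normal(0, 1)
    _ℓ += logdensity_def(Normal(μ, 1), x)   # from x ~ Normal(μ, 1)
    return _ℓ
end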

2 changes: 1 addition & 1 deletion scratchpad/flux.jl
@@ -5,7 +5,7 @@
# t = getTransform(model)
# z = randn(t.dimension) |> Flux.param

# fpre = @eval $(logdensity(model))
# fpre = @eval $(logdensity_def(model))
# f(par, data) = Base.invokelatest(fpre, par, data)

# loss(data) = -f(transform(t, z), data)
2 changes: 1 addition & 1 deletion scratchpad/gps.jl
@@ -8,7 +8,7 @@ y = sinpi.(x) + 0.3*randn(100)

import TransformVariables
const TV = TransformVariables
Soss.xform(gp::AbstractGPs.FiniteGP, _data=NamedTuple()) = TV.as(Array, TV.asℝ,size(gp)...)
Soss.as(gp::AbstractGPs.FiniteGP, _data=NamedTuple()) = as(Array, asℝ,size(gp)...)

sqexpkernel(alpha::Real, rho::Real) =
alpha^2 * transform(SEKernel(), 1/(rho*sqrt(2)))
44 changes: 34 additions & 10 deletions scratchpad/hmm.jl
@@ -1,16 +1,41 @@
using MeasureTheory
using Base.Iterators
using Statistics

using Soss
using Random
rng = Random.Xoshiro(3)

x = Chain(Normal()) do xj Normal(μ=xj) end
xobs = rand(rng, x)
y = For(xobs) do xj Poisson(logλ=xj) end
yobs = rand(rng, y)
xv = take(xobs, 10) |> collect
yv = take(yobs, 10) |> collect
# 5 7 10

rng = Random.Xoshiro(12)

m = @model begin
latent ~ Chain(Normal(μ=1)) do x Normal(μ=x, σ=0.2) end
observed ~ For(latent) do x Poisson(logλ=x) end
end

truth = rand(rng, m())

xv = take(truth.latent, 100) |> collect
yv = take(truth.observed, 100) |> collect


using Plots

plt = scatter(yv, label="observations")
plot!(exp.(xv), lw=3, label="latent process")

using Statistics

scatter(exp.(xv), poiscdf.(exp.(xv), yv), label=false)

x = Chain(Normal(μ=1)) do xj Normal(μ=xj, σ=0.2) end


xvals = rand(rng, x)
y = For(xobs) do xj Poisson(logλ= xj) end
yvals = rand(rng, y)
xv = take(xobs, 100) |> collect
yv = take(yobs, 100) |> collect

take(xobs.parent, 10) |> collect
take(yobs.parent, 10) |> collect
@@ -19,7 +44,6 @@ take(yobs.parent, 10) |> collect
exp.(xv)
yv

# using Plots

# plt = scatter(normcdf.(xv, 1, yv), label=false)

@@ -44,5 +68,5 @@ truth = rand(rng, m())
xobs = take(truth.x, 10) |> collect
yobs = take(truth.y, 10) |> collect

logdensity(m(), (x=xobs, y=yobs))
logdensity_def(m(), (x=xobs, y=yobs))

10 changes: 5 additions & 5 deletions scratchpad/repeated-measurements-example.jl
@@ -77,7 +77,7 @@ function elbo(x)

qλ = q(r=5,s=14, λα=λα)
r = rand(qλ)
logdensity(posterior, r) - logdensity(qλ, r)
logdensity_def(posterior, r) - logdensity_def(qλ, r)
end

elbo(x)
@@ -139,7 +139,7 @@ x0 = zeros(48)



logdensity(p(r=5, s=14) | (;y), rand(q(r=5,s=14, λ=λ.λ)))
logdensity_def(p(r=5, s=14) | (;y), rand(q(r=5,s=14, λ=λ.λ)))



@@ -202,7 +202,7 @@ logdensity(p(r=5, s=14) | (;y), rand(q(r=5,s=14, λ=λ.λ)))

function elbo(p, args, obs, q, λ)
qargs = merge(args, λ)
mean(logpdf(p(args), merge(particles(q(qargs)), obs))) + entropy(q(args))
mean(logdensityof(p(args), merge(particles(q(qargs)), obs))) + entropy(q(args))
end


@@ -239,7 +239,7 @@ using AdvancedVI

bad = rand(Bernoulli(0.5) |> iid(14))

tr = xform(p(r=5,s=14), (y=y,));
tr = as(p(r=5,s=14), (y=y,));
d = tr.dimension

getq(λ) = MvNormal(λ[1:d], exp.(λ[d .+ (1:d)]))
@@ -249,7 +249,7 @@ advi = ADVI(10, 10_000)


function logπ(θ)
logpdf(p(r=5,s=14), merge((y=y,), tr(θ)))
logdensityof(p(r=5,s=14), merge((y=y,), tr(θ)))
end

logπ(randn(24))

2 comments on commit 9b057c0

@cscherrer
Owner Author


@JuliaRegistrator


Registration pull request created: JuliaRegistries/General/60967

After the above pull request is merged, it is recommended that a tag is created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or can be done manually through the github interface, or via:

git tag -a v0.21.0 -m "<description of version>" 9b057c011e33eca6b4205178e2ac29ff6d5461fb
git push origin v0.21.0
