Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/faster nix postpred #10

Open
wants to merge 11 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 9 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
# Changelog

## [0.19.0] - 2024-12-25

### Changed
- Merry Christmas
- `NormalInvChiSquared`, `NormalGamma`, and `NormalInvGamma` `PpCache` for Gaussian conjugate analysis changed. `ln_pp_with_cache` is much faster.
- `Gamma` `PpCache` for Poisson conjugate analysis has been optimized. `ln_pp_with_cache` is faster.

## [0.18.0] - 2024-06-24

### Added
Expand Down Expand Up @@ -215,6 +222,7 @@
- Remove dependency on `quadrature` crate in favor of hand-rolled adaptive
Simpson's rule, which handles multimodal distributions better.

[0.19.0]: https://github.com/promised-ai/rv/compare/v0.18.0...v0.19.0
[0.18.0]: https://github.com/promise-ai/rv/compare/v0.17.0...v0.18.0
[0.17.0]: https://github.com/promise-ai/rv/compare/v0.16.5...v0.17.0
[0.16.5]: https://github.com/promise-ai/rv/compare/v0.16.4...v0.16.5
Expand Down
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

12 changes: 10 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "rv"
version = "0.18.0"
version = "0.19.0"
authors = ["Baxter Eaves", "Michael Schmidt", "Chad Scherrer"]
description = "Random variables"
repository = "https://github.com/promised-ai/rv"
Expand All @@ -13,7 +13,7 @@ include = ["README.md", "src/**/*", "benches/*", "Cargo.toml"]
rust-version = "1.72"

[badges]
github = { repository = "promised-ai/rv", tag = "v0.17.0" }
github = { repository = "promised-ai/rv", tag = "v0.19.0" }
maintenance = { status = "actively-developed" }

[dependencies]
Expand Down Expand Up @@ -90,3 +90,11 @@ required-features = ["arraydist"]
[[bench]]
name = "mixture_entropy"
harness = false

[[bench]]
name = "nix"
harness = false

[[bench]]
name = "ng"
harness = false
57 changes: 57 additions & 0 deletions benches/ng.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
use criterion::black_box;
use criterion::BatchSize;
use criterion::Criterion;
use criterion::{criterion_group, criterion_main};
use rv::data::GaussianSuffStat;
use rv::dist::Gaussian;
use rv::dist::NormalGamma;
use rv::traits::*;

/// Benchmark `NormalGamma` posterior-predictive evaluation, comparing a
/// direct `ln_pp` call against `ln_pp_with_cache` with a precomputed
/// `PpCache`. Each batch draws a fresh 10-observation sufficient statistic
/// and query point in the setup closure so only the measured call is timed.
fn bench_ng_postpred(c: &mut Criterion) {
    let mut group = c.benchmark_group("NG ln pp(x)");
    let ng = NormalGamma::new_unchecked(0.1, 1.2, 2.3, 3.4);
    let mut rng = rand::thread_rng();
    let g = Gaussian::standard();

    // Bench ids are string literals; `format!` on a literal just allocates
    // needlessly (clippy::useless_format).
    group.bench_function("No cache", |b| {
        b.iter_batched(
            || {
                // Fresh stat + query point per batch; excluded from timing.
                let mut stat = GaussianSuffStat::new();
                g.sample_stream(&mut rng).take(10).for_each(|x: f64| {
                    stat.observe(&x);
                });
                let y: f64 = g.draw(&mut rng);
                (y, stat)
            },
            |(y, stat)| {
                black_box(ng.ln_pp(&y, &DataOrSuffStat::SuffStat(&stat)))
            },
            BatchSize::SmallInput,
        );
    });

    group.bench_function("With cache", |b| {
        b.iter_batched(
            || {
                let mut stat = GaussianSuffStat::new();
                g.sample_stream(&mut rng).take(10).for_each(|x: f64| {
                    stat.observe(&x);
                });
                let y: f64 = g.draw(&mut rng);
                // Cache construction happens in setup so the measurement
                // isolates the cached-evaluation fast path.
                let cache = ng.ln_pp_cache(&DataOrSuffStat::SuffStat(&stat));
                (y, cache)
            },
            |(y, cache)| black_box(ng.ln_pp_with_cache(&cache, &y)),
            BatchSize::SmallInput,
        );
    });
}

// Register the NormalGamma posterior-predictive benchmark under one group.
criterion_group!(ng_benches, bench_ng_postpred);
// Expand to the harness `main` (required with `harness = false` in Cargo.toml).
criterion_main!(ng_benches);
99 changes: 99 additions & 0 deletions benches/nix.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
use criterion::black_box;
use criterion::BatchSize;
use criterion::Criterion;
use criterion::{criterion_group, criterion_main};
use rv::data::GaussianSuffStat;
use rv::dist::Gaussian;
use rv::dist::NormalInvChiSquared;
use rv::traits::*;

/// Benchmark `NormalInvChiSquared` posterior-predictive evaluation,
/// comparing a direct `ln_pp` call against `ln_pp_with_cache` with a
/// precomputed `PpCache`. Setup closures build a fresh 10-observation
/// sufficient statistic and query point per batch, outside the timed region.
fn bench_nix_postpred(c: &mut Criterion) {
    let mut group = c.benchmark_group("NIX ln pp(x)");
    let nix = NormalInvChiSquared::new_unchecked(0.1, 1.2, 2.3, 3.4);
    let mut rng = rand::thread_rng();
    let g = Gaussian::standard();

    // Plain &str ids: `format!` on a literal allocates for nothing
    // (clippy::useless_format).
    group.bench_function("No cache", |b| {
        b.iter_batched(
            || {
                let mut stat = GaussianSuffStat::new();
                g.sample_stream(&mut rng).take(10).for_each(|x: f64| {
                    stat.observe(&x);
                });
                let y: f64 = g.draw(&mut rng);
                (y, stat)
            },
            |(y, stat)| {
                black_box(nix.ln_pp(&y, &DataOrSuffStat::SuffStat(&stat)))
            },
            BatchSize::SmallInput,
        );
    });

    group.bench_function("With cache", |b| {
        b.iter_batched(
            || {
                let mut stat = GaussianSuffStat::new();
                g.sample_stream(&mut rng).take(10).for_each(|x: f64| {
                    stat.observe(&x);
                });
                let y: f64 = g.draw(&mut rng);
                // Cache built in setup so only the cached lookup is measured.
                let cache = nix.ln_pp_cache(&DataOrSuffStat::SuffStat(&stat));
                (y, cache)
            },
            |(y, cache)| black_box(nix.ln_pp_with_cache(&cache, &y)),
            BatchSize::SmallInput,
        );
    });
}

/// Benchmark the two `GaussianSuffStat` update primitives: `forget`
/// (removing the most recently observed value) and `observe` (adding one).
/// Each batch constructs its own stat in setup so the timed closure does a
/// single update on an independently owned statistic.
fn bench_gauss_stat(c: &mut Criterion) {
    let mut group = c.benchmark_group("Gaussian Suffstat");

    let mut rng = rand::thread_rng();
    let g = Gaussian::standard();

    // &str ids instead of `format!` on a literal (clippy::useless_format).
    group.bench_function("Forget", |b| {
        b.iter_batched(
            || {
                // Seed the stat with a few values, then observe `x` last so
                // forgetting it in the timed closure is always valid.
                let mut stat = GaussianSuffStat::new();
                for _ in 0..3 {
                    let x: f64 = g.draw(&mut rng);
                    stat.observe(&x);
                }
                let x: f64 = g.draw(&mut rng);
                stat.observe(&x);
                (x, stat)
            },
            |(x, mut stat)| {
                black_box(stat.forget(&x));
            },
            BatchSize::SmallInput,
        );
    });

    group.bench_function("Observe", |b| {
        b.iter_batched(
            || {
                // Non-empty stat so the measured `observe` exercises the
                // incremental-update path rather than first-observation setup.
                let mut stat = GaussianSuffStat::new();
                let x: f64 = g.draw(&mut rng);
                stat.observe(&x);
                let x: f64 = g.draw(&mut rng);
                (x, stat)
            },
            |(x, mut stat)| {
                black_box(stat.observe(&x));
            },
            BatchSize::SmallInput,
        );
    });
}

// Register both NIX benchmarks (posterior predictive + suffstat updates).
criterion_group!(nix_benches, bench_nix_postpred, bench_gauss_stat);
// Expand to the harness `main` (required with `harness = false` in Cargo.toml).
criterion_main!(nix_benches);
32 changes: 23 additions & 9 deletions src/data/stat/gaussian.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@ impl GaussianSuffStat {
let nf = self.n as f64;
(self.mean() * self.mean()).mul_add(nf, self.sx)
}

/// Returns the raw `sx` accumulator: the running sum of squared deviations
/// from the current mean, as maintained by the Welford-style update in
/// `observe` (`sx = (x - mean_old).mul_add(x - mean_new, sx)`).
/// NOTE(review): unlike `sum_x_sq` above, no recentering is applied here.
#[inline]
pub fn sum_sq_diff(&self) -> f64 {
    self.sx
}
}

impl Default for GaussianSuffStat {
Expand Down Expand Up @@ -116,21 +121,30 @@ macro_rules! impl_gaussian_suffstat {
fn observe(&mut self, x: &$kind) {
let xf = f64::from(*x);

let n = self.n;
let mean = self.mean;
let sx = self.sx;
if self.n == 0 {
*self = GaussianSuffStat {
n: 1,
mean: xf,
sx: 0.0,
};
} else {
let n = self.n;
let mean = self.mean;
let sx = self.sx;

let n1 = n + 1;
let mean_xn = (xf - mean).mul_add((n1 as f64).recip(), mean);
let n1 = n + 1;
let mean_xn =
(xf - mean).mul_add((n1 as f64).recip(), mean);

self.n = n + 1;
self.mean = mean_xn;
self.sx = (xf - mean).mul_add(xf - mean_xn, sx);
self.n = n + 1;
self.mean = mean_xn;
self.sx = (xf - mean).mul_add(xf - mean_xn, sx);
}
}

#[inline]
fn forget(&mut self, x: &$kind) {
let n = self.n;

if n > 1 {
let xf = f64::from(*x);
let mean = self.mean;
Expand Down
14 changes: 9 additions & 5 deletions src/dist/gamma/poisson_prior.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ use rand::Rng;
use crate::data::PoissonSuffStat;
use crate::dist::poisson::PoissonError;
use crate::dist::{Gamma, Poisson};
use crate::misc::ln_binom;
use crate::misc::ln_gammafn;
use crate::traits::*;

impl HasDensity<Poisson> for Gamma {
Expand Down Expand Up @@ -104,18 +104,22 @@ macro_rules! impl_traits {
let post = self.posterior(x);
let r = post.shape();
let p = 1.0 / (1.0 + post.rate());
(r, p, p.ln())
let ln_p = -post.rate().ln_1p();
let ln_gamma_r = ln_gammafn(post.shape());

let z = (1.0 - p).ln().mul_add(r, -ln_gamma_r);
(z, r, ln_p)
}

fn ln_pp_with_cache(
&self,
cache: &Self::PpCache,
y: &$kind,
) -> f64 {
let (r, p, ln_p) = cache;
let (z, r, ln_p) = cache;
let k = f64::from(*y);
let bnp = ln_binom(k + r - 1.0, k);
bnp + (1.0 - p).ln() * r + k * ln_p
let bnp = ln_gammafn(k + r) - ln_gammafn(k + 1.0);
z + k * ln_p + bnp
}
}
};
Expand Down
Loading
Loading