diff --git a/distr_test/tests/cdf.rs b/distr_test/tests/cdf.rs
index 9704c44..58bb17d 100644
--- a/distr_test/tests/cdf.rs
+++ b/distr_test/tests/cdf.rs
@@ -395,7 +395,7 @@ fn geometric() {
 #[test]
 fn hypergeometric() {
     fn cdf(x: i64, n: u64, k: u64, n_: u64) -> f64 {
-        let min = if n_ + k > n { n_ + k - n } else { 0 };
+        let min = (n_ + k).saturating_sub(n);
         let max = k.min(n_);
         if x < min as i64 {
             return 0.0;
diff --git a/distr_test/tests/ks/mod.rs b/distr_test/tests/ks/mod.rs
index ab94db6..21246e8 100644
--- a/distr_test/tests/ks/mod.rs
+++ b/distr_test/tests/ks/mod.rs
@@ -112,8 +112,8 @@ pub fn test_continuous(seed: u64, dist: impl Distribution<f64>, cdf: impl Fn(f64
 
     let critical_value = critical_value();
 
-    println!("KS statistic: {}", ks_statistic);
-    println!("Critical value: {}", critical_value);
+    println!("KS statistic: {ks_statistic}");
+    println!("Critical value: {critical_value}");
 
     assert!(ks_statistic < critical_value);
 }
@@ -131,7 +131,7 @@ where
     // This critical value is bigger than it could be for discrete distributions, but because of large sample sizes this should not matter too much
     let critical_value = critical_value();
 
-    println!("KS statistic: {}", ks_statistic);
-    println!("Critical value: {}", critical_value);
+    println!("KS statistic: {ks_statistic}");
+    println!("Critical value: {critical_value}");
     assert!(ks_statistic < critical_value);
 }
diff --git a/distr_test/tests/skew_normal.rs b/distr_test/tests/skew_normal.rs
index 0e6b7b3..ec11a4c 100644
--- a/distr_test/tests/skew_normal.rs
+++ b/distr_test/tests/skew_normal.rs
@@ -257,6 +257,9 @@ fn owen_t(h: f64, a: f64) -> f64 {
 }
 
 fn normal_cdf(x: f64, mean: f64, std_dev: f64) -> f64 {
+    // f64::erfc from feature `float_erf` may be used over special::Primitive::erfc
+    #![allow(unstable_name_collisions)]
+
     0.5 * ((mean - x) / (std_dev * core::f64::consts::SQRT_2)).erfc()
 }
 
diff --git a/src/beta.rs b/src/beta.rs
index 4dc297c..bc180c6 100644
--- a/src/beta.rs
+++ b/src/beta.rs
@@ -287,7 +287,7 @@ mod test {
         let beta = Beta::<f64>::new(1e-3, 1e-3).unwrap();
         let mut rng = crate::test::rng(206);
         for i in 0..1000 {
-            assert!(!beta.sample(&mut rng).is_nan(), "failed at i={}", i);
+            assert!(!beta.sample(&mut rng).is_nan(), "failed at i={i}");
         }
     }
 
diff --git a/src/cauchy.rs b/src/cauchy.rs
index 8f0faad..8f707ed 100644
--- a/src/cauchy.rs
+++ b/src/cauchy.rs
@@ -141,11 +141,11 @@ mod test {
         }
         let median = median(&mut numbers);
         #[cfg(feature = "std")]
-        std::println!("Cauchy median: {}", median);
+        std::println!("Cauchy median: {median}");
         assert!((median - 10.0).abs() < 0.4); // not 100% certain, but probable enough
         let mean = sum / 1000.0;
         #[cfg(feature = "std")]
-        std::println!("Cauchy mean: {}", mean);
+        std::println!("Cauchy mean: {mean}");
         // for a Cauchy distribution the mean should not converge
         assert!((mean - 10.0).abs() > 0.4); // not 100% certain, but probable enough
     }
diff --git a/src/triangular.rs b/src/triangular.rs
index 05a46e5..7c27d93 100644
--- a/src/triangular.rs
+++ b/src/triangular.rs
@@ -113,11 +113,12 @@ where
 #[cfg(test)]
 mod test {
     use super::*;
-    use rand::{rngs::mock, Rng};
+    use crate::utils::ConstRng;
+    use rand::Rng;
 
     #[test]
     fn test_triangular() {
-        let mut half_rng = mock::StepRng::new(0x8000_0000_0000_0000, 0);
+        let mut half_rng = ConstRng(0x8000_0000_0000_0000);
         assert_eq!(half_rng.random::<f64>(), 0.5);
         for &(min, max, mode, median) in &[
             (-1., 1., 0., 0.),
@@ -128,7 +129,7 @@ mod test {
             (-4., -0.5, -2., -4.0 + 3.5f64.sqrt()),
         ] {
             #[cfg(feature = "std")]
-            std::println!("{} {} {} {}", min, max, mode, median);
+            std::println!("{min} {max} {mode} {median}");
             let distr = Triangular::new(min, max, mode).unwrap();
             // Test correct value at median:
             assert_eq!(distr.sample(&mut half_rng), median);
diff --git a/src/utils.rs b/src/utils.rs
index fb3038c..1417715 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -14,6 +14,24 @@ use num_traits::Float; // Used for `no_std` to get `f64::abs()` working before `
 use rand::distr::hidden_export::IntoFloat;
 use rand::Rng;
 
+/// An RNG yielding a constant value
+#[cfg(test)]
+pub(crate) struct ConstRng(pub(crate) u64);
+#[cfg(test)]
+impl rand::RngCore for ConstRng {
+    fn next_u32(&mut self) -> u32 {
+        self.next_u64() as u32
+    }
+
+    fn next_u64(&mut self) -> u64 {
+        self.0
+    }
+
+    fn fill_bytes(&mut self, _: &mut [u8]) {
+        unimplemented!()
+    }
+}
+
 /// Sample a random number using the Ziggurat method (specifically the
 /// ZIGNOR variant from Doornik 2005). Most of the arguments are
 /// directly from the paper:
@@ -25,7 +43,7 @@ use rand::Rng;
 /// * `F_DIFF`: precomputed values of $f(x_i) - f(x_{i+1})$
 /// * `pdf`: the probability density function
 /// * `zero_case`: manual sampling from the tail when we chose the
-/// bottom box (i.e. i == 0)
+///   bottom box (i.e. i == 0)
 #[inline(always)] // Forced inlining improves the perf by 25-50%
 pub(crate) fn ziggurat<R: Rng + ?Sized, P, Z>(
     rng: &mut R,
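
For context on the new test helper: a minimal, self-contained sketch of why `ConstRng(0x8000_0000_0000_0000)` samples exactly 0.5 in the triangular test, assuming only the rand 0.9 API this crate already uses (`RngCore`, `Rng::random`). rand's standard `f64` sampler keeps the top 53 bits of the `u64`, so `(1u64 << 63) >> 11 == 1 << 52`, which scales to `2^52 / 2^53 = 0.5`.

    use rand::{Rng, RngCore};

    /// Stand-in for the patch's test-only helper: always returns the same word.
    struct ConstRng(u64);

    impl RngCore for ConstRng {
        fn next_u32(&mut self) -> u32 {
            self.next_u64() as u32
        }

        fn next_u64(&mut self) -> u64 {
            self.0
        }

        fn fill_bytes(&mut self, _: &mut [u8]) {
            unimplemented!()
        }
    }

    fn main() {
        // Only the top bit is set, so the 53-bit f64 conversion yields the midpoint.
        let mut half_rng = ConstRng(0x8000_0000_0000_0000);
        assert_eq!(half_rng.random::<f64>(), 0.5);
        assert_eq!(half_rng.random::<f64>(), 0.5); // constant: the state never advances
    }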